Convert rest of the actions
parent aac5eb2aea
commit 217483dfd6
59 changed files with 1630 additions and 915 deletions
32  lib/analysis-paths.js (generated)
@@ -1,13 +1,5 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function isInterpretedLanguage(language) {
    return language === 'javascript' || language === 'python';
}
@@ -23,6 +15,16 @@ function buildIncludeExcludeEnvVar(paths) {
    }
    return paths.join('\n');
}
function printPathFiltersWarning(config, logger) {
    // Index include/exclude/filters only work in javascript and python.
    // If any other languages are detected/configured then show a warning.
    if ((config.paths.length !== 0 ||
        config.pathsIgnore.length !== 0) &&
        !config.languages.every(isInterpretedLanguage)) {
        logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
    }
}
exports.printPathFiltersWarning = printPathFiltersWarning;
function includeAndExcludeAnalysisPaths(config) {
    // The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
    // control which files/directories are traversed when scanning.
@@ -32,10 +34,10 @@ function includeAndExcludeAnalysisPaths(config) {
    // traverse the entire file tree to determine which files are matched.
    // Any paths containing "*" are not included in these.
    if (config.paths.length !== 0) {
        core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
        process.env['LGTM_INDEX_INCLUDE'] = buildIncludeExcludeEnvVar(config.paths);
    }
    if (config.pathsIgnore.length !== 0) {
        core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
        process.env['LGTM_INDEX_EXCLUDE'] = buildIncludeExcludeEnvVar(config.pathsIgnore);
    }
    // The 'LGTM_INDEX_FILTERS' environment variable controls which files are
    // extracted or ignored. It does not control which directories are traversed.
@@ -44,15 +46,7 @@ function includeAndExcludeAnalysisPaths(config) {
    filters.push(...config.paths.map(p => 'include:' + p));
    filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
    if (filters.length !== 0) {
        core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
    }
    // Index include/exclude/filters only work in javascript and python.
    // If any other languages are detected/configured then show a warning.
    if ((config.paths.length !== 0 ||
        config.pathsIgnore.length !== 0 ||
        filters.length !== 0) &&
        !config.languages.every(isInterpretedLanguage)) {
        core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
        process.env['LGTM_INDEX_FILTERS'] = filters.join('\n');
    }
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CAAC,MAA0B,EAAE,MAAc;IAChF,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QAC5B,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAChC,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAEhD,MAAM,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACH,CAAC;AATD,0DASC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;KACnF;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAxBD,wEAwBC"}
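For orientation, a minimal sketch (not part of the commit) of what the reworked includeAndExcludeAnalysisPaths leaves in the environment; the config values are invented for illustration:

    // Hypothetical config, for illustration only.
    const config = { paths: ['src'], pathsIgnore: ['src/vendor'], languages: ['javascript'] };
    includeAndExcludeAnalysisPaths(config);
    // Based on the code above, the environment would then contain:
    //   process.env['LGTM_INDEX_INCLUDE'] === 'src'
    //   process.env['LGTM_INDEX_EXCLUDE'] === 'src/vendor'
    //   process.env['LGTM_INDEX_FILTERS'] === 'include:src\nexclude:src/vendor'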
85  lib/analyze-action.js (generated)
@@ -8,106 +8,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const analyze_1 = require("./analyze");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
async function sendStatusReport(startedAt, stats, error) {
    var _a, _b, _c;
    const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
    const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
    const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
    const statusReport = {
        ...statusReportBase,
        ...(queriesStats || {}),
        ...(uploadStats || {}),
        ...(stats || {}),
    };
    await util.sendStatusReport(statusReport);
}
async function createdDBForScannedLanguages(config) {
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (const language of config.languages) {
        if (languages_1.isScannedLanguage(language)) {
            core.startGroup('Extracting ' + language);
            await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
            core.endGroup();
        }
    }
}
async function finalizeDatabaseCreation(config) {
    await createdDBForScannedLanguages(config);
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (const language of config.languages) {
        core.startGroup('Finalizing ' + language);
        await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
        core.endGroup();
    }
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, config) {
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (let language of config.languages) {
        core.startGroup('Analyzing ' + language);
        const queries = config.queries[language] || [];
        if (queries.length === 0) {
            throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
        }
        try {
            const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
            // Pass the queries to codeql using a file instead of using the command
            // line to avoid command line length restrictions, particularly on windows.
            const querySuite = databasePath + '-queries.qls';
            const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
            fs.writeFileSync(querySuite, querySuiteContents);
            core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
            const sarifFile = path.join(sarifFolder, language + '.sarif');
            await codeql.databaseAnalyze(databasePath, sarifFile, querySuite);
            core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
            core.endGroup();
        }
        catch (e) {
            // For now the fields about query performance are not populated
            return {
                analyze_failure_language: language,
            };
        }
    }
    return {};
}
async function run() {
    const startedAt = new Date();
    let queriesStats = undefined;
    let uploadStats = undefined;
    let stats = undefined;
    try {
        util.prepareLocalRunEnvironment();
        if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
            return;
        }
        const config = await configUtils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
        core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
        delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
        const sarifFolder = core.getInput('output');
        fs.mkdirSync(sarifFolder, { recursive: true });
        core.info('Finalizing database creation');
        await finalizeDatabaseCreation(config);
        core.info('Analyzing database');
        queriesStats = await runQueries(sarifFolder, config);
        if ('true' === core.getInput('upload')) {
            uploadStats = await upload_lib.upload(sarifFolder, repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
        }
        stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), core.getInput('upload') === 'true', 'actions', core.getInput('output'), util.getRequiredEnvParam('RUNNER_TEMP'), logging_1.getActionsLogger());
    }
    catch (error) {
        core.setFailed(error.message);
        console.log(error);
        await sendStatusReport(startedAt, queriesStats, uploadStats, error);
        await sendStatusReport(startedAt, stats, error);
        return;
    }
    await sendStatusReport(startedAt, queriesStats, uploadStats);
    await sendStatusReport(startedAt, stats);
}
run().catch(e => {
    core.setFailed("analyze action failed: " + e);
@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,2CAAgD;AAChD,uCAA6C;AAC7C,6CAAkD;AAClD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,YAA6C,EAC7C,WAAsD,EACtD,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,YAAY,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IACnH,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,YAAY,IAAI,EAAE,CAAC;QACvB,GAAG,CAAC,WAAW,IAAI,EAAE,CAAC;KACvB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,MAA0B;IACpE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACpG,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,MAA0B;IAChE,MAAM,4BAA4B,CAAC,MAAM,CAAC,CAAC;IAE3C,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;QACpF,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,WAAmB,EACnB,MAA0B;IAE1B,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACrC,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAC1E,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,YAAY,GAAG,cAAc,CAAC;YACjD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,YAAY,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAElE,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;SAEjB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAoC,SAAS,CAAC;IAC9D,IAAI,WAAW,GAA8C,SAAS,CAAC;IACvE,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC1G,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC,CAAC;QAEpF,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,EAAE,CAAC,SAAS,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE/C,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,MAAM,CAAC,CAAC;QAEvC,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QAErD,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YAC
tC,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACnC,WAAW,EACX,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,SAAS,EACT,0BAAgB,EAAE,CAAC,CAAC;SACvB;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAC/D,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6D;AAC7D,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAI/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5G,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC1G,OAAO;SACR;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,KAAK,MAAM,EAClC,SAAS,EACT,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,0BAAgB,EAAE,CAAC,CAAC;KAEvB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
89  lib/analyze.js (generated, new file)
@@ -0,0 +1,89 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function createdDBForScannedLanguages(config, logger) {
    // Insert the LGTM_INDEX_X env vars at this point so they are set when
    // we extract any scanned languages.
    analysisPaths.includeAndExcludeAnalysisPaths(config);
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (const language of config.languages) {
        if (languages_1.isScannedLanguage(language)) {
            logger.startGroup('Extracting ' + language);
            await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
            logger.endGroup();
        }
    }
}
async function finalizeDatabaseCreation(config, logger) {
    await createdDBForScannedLanguages(config, logger);
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (const language of config.languages) {
        logger.startGroup('Finalizing ' + language);
        await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
        logger.endGroup();
    }
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, config, logger) {
    const codeql = codeql_1.getCodeQL(config.codeQLCmd);
    for (let language of config.languages) {
        logger.startGroup('Analyzing ' + language);
        const queries = config.queries[language] || [];
        if (queries.length === 0) {
            throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
        }
        try {
            const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
            // Pass the queries to codeql using a file instead of using the command
            // line to avoid command line length restrictions, particularly on windows.
            const querySuite = databasePath + '-queries.qls';
            const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
            fs.writeFileSync(querySuite, querySuiteContents);
            logger.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
            const sarifFile = path.join(sarifFolder, language + '.sarif');
            await codeql.databaseAnalyze(databasePath, sarifFile, querySuite);
            logger.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
            logger.endGroup();
        }
        catch (e) {
            // For now the fields about query performance are not populated
            return {
                analyze_failure_language: language,
            };
        }
    }
    return {};
}
async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, doUpload, mode, outputDir, tempDir, logger) {
    const config = await configUtils.getConfig(tempDir, logger);
    // Delete the tracer config env var to avoid tracing ourselves
    delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
    fs.mkdirSync(outputDir, { recursive: true });
    logger.info('Finalizing database creation');
    await finalizeDatabaseCreation(config, logger);
    logger.info('Analyzing database');
    const queriesStats = await runQueries(outputDir, config, logger);
    if (!doUpload) {
        logger.info('Not uploading results');
        return { ...queriesStats };
    }
    const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
    return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map
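A hedged sketch of how the new runAnalyze entry point might be called outside of the Action; every literal below is an invented example value, and the real call site is the 'actions'-mode call in analyze-action.js above:

    // Hypothetical runner-mode invocation, for illustration only.
    const stats = await runAnalyze(
        parseRepositoryNwo('octo-org/octo-repo'), // repositoryNwo
        '0123456789abcdef',                       // commitOid (example SHA)
        'refs/heads/main',                        // ref
        undefined,                                // analysisKey
        undefined,                                // analysisName
        undefined,                                // workflowRunID
        process.cwd(),                            // checkoutPath
        undefined,                                // environment
        'some-token',                             // githubAuth
        'https://github.example.com',             // githubUrl
        true,                                     // doUpload
        'runner',                                 // mode
        'codeql-results',                         // outputDir
        '/tmp/codeql',                            // tempDir
        getRunnerLogger());                       // logger
    // stats merges the query stats with the upload stats when doUpload is true.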
1  lib/analyze.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAAqC;AACrC,4DAA8C;AAC9C,2CAAgD;AAGhD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC5C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACpG,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;QACpF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACrC,MAAM,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAC1E,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,YAAY,GAAG,cAAc,CAAC;YACjD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAEhF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,YAAY,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAElE,MAAM,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YAC3F,MAAM,CAAC,QAAQ,EAAE,CAAC;SAEnB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAAkB,EAClB,SAAiB,EACjB,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5D,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/C,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,SAAS,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;IAEjE,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,CAAC,CAAC;IAEV,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AAnDD,gCAmDC"}
16  lib/api-client.js (generated)
@@ -13,18 +13,30 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const path = __importStar(require("path"));
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubApiUrl, allowLocalRun = false) {
exports.getApiClient = function (githubAuth, githubUrl, allowLocalRun = false) {
    if (util_1.isLocalRun() && !allowLocalRun) {
        throw new Error('Invalid API call in local run');
    }
    return new github.GitHub({
        auth: parseAuth(githubAuth),
        baseUrl: githubApiUrl,
        baseUrl: getApiUrl(githubUrl),
        userAgent: "CodeQL Action",
        log: console_log_level_1.default({ level: "debug" })
    });
};
function getApiUrl(githubUrl) {
    const url = new URL(githubUrl);
    // If we detect this is trying to be to github.com
    // then return with a fixed canonical URL.
    if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
        return 'https://api.github.com';
    }
    // Add the /api/v3 API prefix
    url.pathname = path.join(url.pathname, 'api', 'v3');
    return url.toString();
}
// Parses the user input as either a single token,
// or a username and password / PAT.
function parseAuth(auth) {
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,YAAoB,EAAE,aAAa,GAAG,KAAK;IAClG,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,YAAY;QACrB,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAChD,2CAA6B;AAE7B,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,SAAiB,EAAE,aAAa,GAAG,KAAK;IAC/F,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,SAAS,CAAC,SAAS,CAAC;QAC7B,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
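The effect of the new getApiUrl helper, sketched with example instance URLs (illustrative only, not taken from the commit):

    getApiUrl('https://github.com');          // -> 'https://api.github.com'
    getApiUrl('https://api.github.com');      // -> 'https://api.github.com'
    getApiUrl('https://ghe.example.com');     // -> 'https://ghe.example.com/api/v3'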
4  lib/autobuild-action.js (generated)
@@ -11,6 +11,7 @@ const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
const config_utils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
    var _a, _b;
@@ -24,6 +25,7 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
    await util.sendStatusReport(statusReport);
}
async function run() {
    const logger = logging_1.getActionsLogger();
    const startedAt = new Date();
    let language;
    try {
@@ -31,7 +33,7 @@ async function run() {
        if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
            return;
        }
        const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
        const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
        // Attempt to find a language to autobuild
        // We want pick the dominant language in the repo from the ones we're able to build
        // The languages are sorted in order specified by user or by lines of code if we got
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,6DAA+C;AAC/C,2CAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC,CAAC;QAErF,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,6DAA+C;AAC/C,2CAA+C;AAC/C,uCAA6C;AAC7C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,MAAM,CAAC,CAAC;QAE7F,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
24  lib/autobuild.js (generated, new file)
@@ -0,0 +1,24 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const codeql_1 = require("./codeql");
const config_utils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
async function runAutobuild(language, tmpDir, logger) {
    if (!languages_1.isTracedLanguage(language)) {
        throw new Error(`Cannot build "${language}" as it is not a traced language`);
    }
    const config = await config_utils.getConfig(tmpDir, logger);
    logger.startGroup(`Attempting to automatically build ${language} code`);
    const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
    await codeQL.runAutobuild(language);
    logger.endGroup();
}
exports.runAutobuild = runAutobuild;
//# sourceMappingURL=autobuild.js.map
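A small usage sketch for the new runAutobuild helper; the language and paths below are examples only:

    // Traced languages (e.g. java or cpp) can be autobuilt; scanned languages
    // such as javascript or python make this throw.
    await runAutobuild('java', '/tmp/codeql', getRunnerLogger());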
1  lib/autobuild.js.map (new file)
@@ -0,0 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,qCAAqC;AACrC,6DAA+C;AAC/C,2CAAyD;AAGlD,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAAc,EACd,MAAc;IAEd,IAAI,CAAC,4BAAgB,CAAC,QAAQ,CAAC,EAAE;QAC/B,MAAM,IAAI,KAAK,CAAC,iBAAiB,QAAQ,kCAAkC,CAAC,CAAC;KAC9E;IAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAfD,oCAeC"}
90  lib/codeql.js (generated)
@@ -10,7 +10,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
@@ -31,30 +30,35 @@ let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository() {
    // Actions do not know their own repository name,
    // so we currently use this hack to find the name based on where our files are.
    // This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
    const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
    const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
    const relativeScriptPath = path.relative(actionsDirectory, __filename);
    // This handles the case where the Action does not come from an Action repository,
    // e.g. our integration tests which use the Action code from the current checkout.
    if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
function getCodeQLActionRepository(mode) {
    if (mode === 'actions') {
        // Actions do not know their own repository name,
        // so we currently use this hack to find the name based on where our files are.
        // This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
        const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
        const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
        const relativeScriptPath = path.relative(actionsDirectory, __filename);
        // This handles the case where the Action does not come from an Action repository,
        // e.g. our integration tests which use the Action code from the current checkout.
        if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
            return CODEQL_DEFAULT_ACTION_REPOSITORY;
        }
        const relativeScriptPathParts = relativeScriptPath.split(path.sep);
        return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
    }
    else {
        return CODEQL_DEFAULT_ACTION_REPOSITORY;
    }
    const relativeScriptPathParts = relativeScriptPath.split(path.sep);
    return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
}
async function getCodeQLBundleDownloadURL() {
    const codeQLActionRepository = getCodeQLActionRepository();
async function getCodeQLBundleDownloadURL(githubUrl, mode, logger) {
    const codeQLActionRepository = getCodeQLActionRepository(mode);
    const potentialDownloadSources = [
        // This GitHub instance, and this Action.
        [util.getInstanceAPIURL(), codeQLActionRepository],
        [githubUrl, codeQLActionRepository],
        // This GitHub instance, and the canonical Action.
        [util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
        [githubUrl, CODEQL_DEFAULT_ACTION_REPOSITORY],
        // GitHub.com, and the canonical Action.
        [util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
        [util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
    ];
    // We now filter out any duplicates.
    // Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
@@ -62,7 +66,7 @@ async function getCodeQLBundleDownloadURL() {
    for (let downloadSource of uniqueDownloadSources) {
        let [apiURL, repository] = downloadSource;
        // If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
        if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
        if (apiURL === util.GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
            break;
        }
        let [repositoryOwner, repositoryName] = repository.split("/");
@@ -74,59 +78,59 @@ async function getCodeQLBundleDownloadURL() {
            });
            for (let asset of release.data.assets) {
                if (asset.name === CODEQL_BUNDLE_NAME) {
                    core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
                    logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
                    return asset.url;
                }
            }
        }
        catch (e) {
            core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
            logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
        }
    }
    return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers) {
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
    const client = new http.HttpClient('CodeQL Action');
    const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
    const dest = path.join(tempDir, v4_1.default());
    const response = await client.get(url, headers);
    if (response.message.statusCode !== 200) {
        const err = new toolcache.HTTPError(response.message.statusCode);
        core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
        throw err;
        logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
        throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
    }
    const pipeline = globalutil.promisify(stream.pipeline);
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    await pipeline(response.message, fs.createWriteStream(dest));
    return dest;
}
async function setupCodeQL() {
async function setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
    // Setting these two env vars makes the toolcache code safe to use,
    // but this is obviously not a great thing we're doing and it would
    // be better to write our own implementation to use outside of actions.
    process.env['RUNNER_TEMP'] = tempDir;
    process.env['RUNNER_TOOL_CACHE'] = toolsDir;
    try {
        let codeqlURL = core.getInput('tools');
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
        const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);
        let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
        if (codeqlFolder) {
            core.debug(`CodeQL found in cache ${codeqlFolder}`);
            logger.debug(`CodeQL found in cache ${codeqlFolder}`);
        }
        else {
            if (!codeqlURL) {
                codeqlURL = await getCodeQLBundleDownloadURL();
                codeqlURL = await getCodeQLBundleDownloadURL(githubUrl, mode, logger);
            }
            const headers = { accept: 'application/octet-stream' };
            // We only want to provide an authorization header if we are downloading
            // from the same GitHub instance the Action is running on.
            // This avoids leaking Enterprise tokens to dotcom.
            if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
                core.debug('Downloading CodeQL bundle with token.');
                let token = core.getInput('token', { required: true });
                headers.authorization = `token ${token}`;
            if (codeqlURL.startsWith(githubUrl + "/")) {
                logger.debug('Downloading CodeQL bundle with token.');
                headers.authorization = `token ${githubAuth}`;
            }
            else {
                core.debug('Downloading CodeQL bundle without token.');
                logger.debug('Downloading CodeQL bundle without token.');
            }
            let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
            core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
            let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
            logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
        }
@@ -141,19 +145,19 @@ async function setupCodeQL() {
        return cachedCodeQL;
    }
    catch (e) {
        core.error(e);
        logger.error(e);
        throw new Error("Unable to download and extract CodeQL CLI");
    }
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
function getCodeQLURLVersion(url, logger) {
    const match = url.match(/\/codeql-bundle-(.*)\//);
    if (match === null || match.length < 2) {
        throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
    }
    let version = match[1];
    if (!semver.valid(version)) {
        core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
        version = '0.0.0-' + version;
    }
    const s = semver.clean(version);
File diff suppressed because one or more lines are too long
9  lib/codeql.test.js (generated)
@@ -15,22 +15,19 @@ const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
    await util.withTmpDir(async (tmpDir) => {
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
        const versions = ['20200601', '20200610'];
        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];
            nock_1.default('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
            await codeql.setupCodeQL();
            await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, 'token', 'https://github.example.com', tmpDir, tmpDir, 'runner', logging_1.getRunnerLogger());
            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }
        const cachedVersions = toolcache.findAllVersions('CodeQL');
@@ -49,7 +46,7 @@ ava_1.default('parse codeql bundle url version', t => {
    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;
        try {
            const parsedVersion = codeql.getCodeQLURLVersion(url);
            const parsedVersion = codeql.getCodeQLURLVersion(url, logging_1.getRunnerLogger());
            t.deepEqual(parsedVersion, expectedVersion);
        }
        catch (e) {
@@ -1 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE;IACnD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEtE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE;IACjE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,CAAC;IACnD,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE;IACjD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,C
AAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;AACtG,CAAC,CAAC,CAAC"}
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,uCAA4C;AAC5C,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAErF,MAAM,MAAM,CAAC,WAAW,CACtB,8CAA8C,OAAO,uBAAuB,EAC5E,OAAO,EACP,4BAA4B,EAC5B,MAAM,EACN,MAAM,EACN,QAAQ,EACR,yBAAe,EAAE,CAAC,CAAC;YAErB,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,EAAE,yBAAe,EAAE,CAAC,CAAC;YACzE,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE;IACnD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEtE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE;IACjE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,CAAC;IACnD,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE;IACjD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAA
E,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;AACtG,CAAC,CAAC,CAAC"}
126
lib/config-utils.js
generated
@ -7,14 +7,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const externalQueries = __importStar(require("./external-queries"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
@ -100,11 +98,10 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, c
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
async function addLocalQueries(codeQL, resultMap, localQueryPath, configFile) {
async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath, configFile) {
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
@ -112,17 +109,15 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, configFile) {
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], rootOfRepo, true);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath, true);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFile) {
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, configFile) {
let tok = queryUses.split('@');
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
@ -141,11 +136,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFil
}
const nwo = tok[0] + '/' + tok[1];
// Checkout the external repository
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref, tempDir);
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir);
const queryPath = tok.length > 2
? path.join(rootOfRepo, tok.slice(2).join('/'))
: rootOfRepo;
await runResolveQueries(codeQL, resultMap, [queryPath], rootOfRepo, true);
? path.join(checkoutPath, tok.slice(2).join('/'))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
}
/**
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
@ -155,14 +150,14 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFil
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), configFile);
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
return;
}
// Check for one of the builtin suites
@ -171,7 +166,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFile);
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, configFile);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@ -182,7 +177,7 @@ const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths of paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile) {
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) {
// Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
@ -206,7 +201,7 @@ function validateAndSanitisePath(originalPath, propertyName, configFile) {
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) {
core.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
logger.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
}
// Ban any uses of backslash for now.
@ -297,18 +292,18 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo() {
async function getLanguagesInRepo(githubAuth, githubUrl, logger) {
var _a;
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getActionsApiClient(true).repos.listLanguages({
logger.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.listLanguages({
owner,
repo
});
core.debug("Languages API response: " + JSON.stringify(response));
logger.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
@ -336,17 +331,17 @@ async function getLanguagesInRepo() {
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages() {
async function getLanguages(languagesInput, githubAuth, githubUrl, logger) {
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
let languages = (languagesInput || "")
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
logger.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
languages = await getLanguagesInRepo(githubAuth, githubUrl, logger);
logger.info("Automatically detected languages: " + JSON.stringify(languages));
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
@ -374,24 +369,21 @@ async function getLanguages() {
* Returns true if queries were provided in the workflow file
* (and thus added), otherwise false
*/
async function addQueriesFromWorkflowIfRequired(codeQL, languages, resultMap, tempDir) {
const queryUses = core.getInput('queries');
if (queryUses) {
for (const query of queryUses.split(',')) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir);
}
return true;
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl) {
for (const query of queriesInput.split(',')) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl);
}
return false;
}
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(tempDir, toolCacheDir, codeQL) {
const languages = await getLanguages();
async function getDefaultConfig(languagesInput, queriesInput, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
const languages = await getLanguages(languagesInput, githubAuth, githubUrl, logger);
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
await addQueriesFromWorkflowIfRequired(codeQL, languages, queries, tempDir);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl);
}
return {
languages: languages,
queries: queries,
@ -407,16 +399,15 @@ exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
async function loadConfig(languagesInput, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
let parsedYAML;
if (isLocal(configFile)) {
// Treat the config file as relative to the workspace
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
configFile = path.resolve(workspacePath, configFile);
parsedYAML = getLocalConfig(configFile, workspacePath);
configFile = path.resolve(checkoutPath, configFile);
parsedYAML = getLocalConfig(configFile, checkoutPath);
}
else {
parsedYAML = await getRemoteConfig(configFile);
parsedYAML = await getRemoteConfig(configFile, githubAuth, githubUrl);
}
// Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet.
@ -428,7 +419,7 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
throw new Error(getNameInvalid(configFile));
}
}
const languages = await getLanguages();
const languages = await getLanguages(languagesInput, githubAuth, githubUrl, logger);
const queries = {};
const pathsIgnore = [];
const paths = [];
@ -444,8 +435,10 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
}
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
const addedQueriesFromAction = await addQueriesFromWorkflowIfRequired(codeQL, languages, queries, tempDir);
if (!addedQueriesFromAction && QUERIES_PROPERTY in parsedYAML) {
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl);
}
else if (QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
@ -453,7 +446,7 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, githubUrl, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@ -464,7 +457,7 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
});
}
if (PATHS_PROPERTY in parsedYAML) {
@ -475,7 +468,7 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
});
}
// The list of queries should not be empty for any language. If it is then
@ -503,19 +496,18 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(tempDir, toolCacheDir, codeQL) {
const configFile = core.getInput('config-file');
async function initConfig(languagesInput, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
let config;
// If no config file was provided create an empty one
if (configFile === '') {
core.debug('No configuration file was provided');
config = await getDefaultConfig(tempDir, toolCacheDir, codeQL);
if (!configFile) {
logger.debug('No configuration file was provided');
config = await getDefaultConfig(languagesInput, queriesInput, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
}
else {
config = await loadConfig(configFile, tempDir, toolCacheDir, codeQL);
config = await loadConfig(languagesInput, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
}
// Save the config so we can easily access it again in the future
await saveConfig(config);
await saveConfig(config, logger);
return config;
}
exports.initConfig = initConfig;
@ -526,9 +518,9 @@ function isLocal(configPath) {
}
return (configPath.indexOf("@") === -1);
}
function getLocalConfig(configFile, workspacePath) {
function getLocalConfig(configFile, checkoutPath) {
// Error if the config file is now outside of the workspace
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
}
// Error if the file does not exist
@ -537,7 +529,7 @@ function getLocalConfig(configFile, workspacePath) {
}
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
}
async function getRemoteConfig(configFile) {
async function getRemoteConfig(configFile, githubAuth, githubUrl) {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const pieces = format.exec(configFile);
@ -545,7 +537,7 @@ async function getRemoteConfig(configFile) {
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getActionsApiClient(true).repos.getContents({
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@ -573,13 +565,13 @@ exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
* Store the given config to the path returned from getPathToParsedConfigFile.
*/
async function saveConfig(config) {
async function saveConfig(config, logger) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
core.debug('Saved config:');
core.debug(configString);
logger.debug('Saved config:');
logger.debug(configString);
}
/**
* Get the config.
@ -589,14 +581,14 @@ async function saveConfig(config) {
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
*/
async function getConfig(tempDir) {
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs.existsSync(configFile)) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
const configString = fs.readFileSync(configFile, 'utf8');
core.debug('Loaded config:');
core.debug(configString);
logger.debug('Loaded config:');
logger.debug(configString);
return JSON.parse(configString);
}
exports.getConfig = getConfig;
File diff suppressed because one or more lines are too long
158
lib/config-utils.test.js
generated
@ -19,20 +19,10 @@ const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
function setInput(name, value) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
}
function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
@ -57,10 +47,8 @@ function mockListLanguages(languages) {
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
const logger = logging_1.getRunnerLogger();
const languages = 'javascript,python';
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
@ -70,16 +58,13 @@ ava_1.default("load empty config", async (t) => {
};
},
});
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.deepEqual(config, await configUtils.getDefaultConfig(tmpDir, tmpDir, codeQL));
const config = await configUtils.initConfig(languages, undefined, undefined, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger));
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
const logger = logging_1.getRunnerLogger();
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
@ -92,22 +77,19 @@ ava_1.default("loading config saves config", async (t) => {
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig throws before we have called initConfig
await t.throwsAsync(() => configUtils.getConfig(tmpDir));
const config1 = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
await t.throwsAsync(() => configUtils.getConfig(tmpDir, logger));
const config1 = await configUtils.initConfig('javascript,python', undefined, undefined, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir);
const config2 = await configUtils.getConfig(tmpDir, logger);
t.deepEqual(config1, config2);
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(undefined, undefined, '../input', tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -117,12 +99,10 @@ ava_1.default("load input outside of workspace", async (t) => {
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// no filename given, just a repo
setInput('config-file', 'octo-org/codeql-config@main');
const configFile = 'octo-org/codeql-config@main';
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(undefined, undefined, configFile, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -132,13 +112,11 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
setInput('languages', 'javascript');
const languages = 'javascript';
const configFile = 'input';
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -148,8 +126,6 @@ ava_1.default("load non-existent input", async (t) => {
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
@ -193,18 +169,16 @@ ava_1.default("load non-empty input", async (t) => {
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
};
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const languages = 'javascript';
const configFile = 'input';
fs.writeFileSync(path.join(tmpDir, configFile), inputFileContents, 'utf8');
const actualConfig = await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Check that the default behaviour is to add the default queries.
// In this case if a config file is specified but does not include
// the disable-default-queries field.
@ -232,10 +206,10 @@ ava_1.default("default queries are used", async (t) => {
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const languages = 'javascript';
const configFile = 'input';
fs.writeFileSync(path.join(tmpDir, configFile), inputFileContents, 'utf8');
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
@ -244,14 +218,12 @@ ava_1.default("default queries are used", async (t) => {
});
ava_1.default("Queries can be specified in config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const configFile = path.join(tmpDir, 'input');
fs.writeFileSync(configFile, inputFileContents, 'utf8');
fs.mkdirSync(path.join(tmpDir, 'foo'));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
@ -271,8 +243,8 @@ ava_1.default("Queries can be specified in config file", async (t) => {
};
},
});
setInput('languages', 'javascript');
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const languages = 'javascript';
const config = await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@ -287,16 +259,14 @@ ava_1.default("Queries can be specified in config file", async (t) => {
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const configFile = path.join(tmpDir, 'input');
fs.writeFileSync(configFile, inputFileContents, 'utf8');
// This config item should take precedence over the config file but shouldn't affect the default queries.
setInput('queries', './override');
const queries = './override';
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, 'override'));
const resolveQueriesArgs = [];
@ -317,8 +287,8 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
};
},
});
setInput('languages', 'javascript');
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@ -333,11 +303,9 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
fs.mkdirSync(path.join(tmpDir, 'override1'));
fs.mkdirSync(path.join(tmpDir, 'override2'));
setInput('queries', './override1,./override2');
const queries = './override1,./override2';
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
@ -356,8 +324,8 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
};
},
});
setInput('languages', 'javascript');
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, undefined, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@ -375,10 +343,8 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('queries', 'foo/bar@v1@v3');
setInput('languages', 'javascript');
const queries = 'foo/bar@v1@v3';
const languages = 'javascript';
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
@ -393,7 +359,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
},
});
try {
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
await configUtils.initConfig(languages, queries, undefined, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
t.fail('initConfig did not throw error');
}
catch (err) {
@ -403,8 +369,6 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
});
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
@ -436,22 +400,19 @@ ava_1.default("API client used when reading remote config", async (t) => {
const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, 'foo/bar/dev'), { recursive: true });
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
setInput('languages', 'javascript');
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
const configFile = 'octo-org/codeql-config/config.yaml@main';
const languages = 'javascript';
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
t.assert(spyGetContents.called);
});
});
ava_1.default("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(undefined, undefined, repoReference, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -461,16 +422,13 @@ ava_1.default("Remote config handles the case where a directory is provided", as
});
ava_1.default("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const dummyResponse = {
// note no "content" property here
};
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(undefined, undefined, repoReference, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -480,11 +438,9 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
});
ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
mockListLanguages([]);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(undefined, undefined, undefined, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -494,11 +450,9 @@ ava_1.default("No detected languages", async (t) => {
});
ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('languages', 'ruby,english');
const languages = 'ruby,english';
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
await configUtils.initConfig(languages, undefined, undefined, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -509,8 +463,6 @@ ava_1.default("Unknown languages", async (t) => {
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
@ -520,12 +472,12 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
};
},
});
const inputFile = path.join(tmpDir, 'input');
const languages = 'javascript';
const configFile = 'input';
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
await configUtils.initConfig(languages, undefined, configFile, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger());
throw new Error('initConfig did not throw error');
}
catch (err) {
@ -585,10 +537,10 @@ ava_1.default('path validations', t => {
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger()));
}
for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger()));
}
});
ava_1.default('path sanitisation', t => {
@ -596,8 +548,8 @@ ava_1.default('path sanitisation', t => {
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
// Valid paths are not modified
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile), 'foo/bar');
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile, logging_1.getRunnerLogger()), 'foo/bar');
// Trailing stars are stripped
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile), 'foo/');
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile, logging_1.getRunnerLogger()), 'foo/');
});
//# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
4
lib/external-queries.js
generated
@ -14,7 +14,7 @@ const path = __importStar(require("path"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref, tempDir) {
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir) {
core.info('Checking out ' + repository);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
@ -22,7 +22,7 @@ async function checkoutExternalRepository(repository, ref, tempDir) {
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
const repoURL = githubUrl + '/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW,EAAE,OAAe;IAC/F,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CAAC,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAAC,CAAC;KACpF;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AArBD,gEAqBC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe;IAEf,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CAAC,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAAC,CAAC;KACpF;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,SAAS,GAAG,GAAG,GAAG,UAAU,GAAG,MAAM,CAAC;QACtD,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AA1BD,gEA0BC"}
2
lib/external-queries.test.js
generated
@ -20,7 +20,7 @@ testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const ref = "df4c6869212341b601005567381944ed90906b6b";
await externalQueries.checkoutExternalRepository("github/codeql-go", ref, tmpDir);
await externalQueries.checkoutExternalRepository("github/codeql-go", ref, 'https://github.com', tmpDir);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", ref, "COPYRIGHT")));
});
@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,MAAM,GAAG,GAAG,0CAA0C,CAAC;QACvD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,kBAAkB,EAClB,GAAG,EACH,MAAM,CAAC,CAAC;QAEV,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACpF,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,MAAM,GAAG,GAAG,0CAA0C,CAAC;QACvD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,kBAAkB,EAClB,GAAG,EACH,oBAAoB,EACpB,MAAM,CAAC,CAAC;QAEV,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACpF,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
8
lib/fingerprints.js
generated
@ -156,7 +156,7 @@ function locationUpdateCallback(result, location, logger) {
// the source file so we can hash it.
// If possible returns a absolute file path for the source file,
// or if not possible then returns undefined.
function resolveUriToFile(location, artifacts, logger) {
function resolveUriToFile(location, artifacts, checkoutPath, logger) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
@ -184,7 +184,7 @@ function resolveUriToFile(location, artifacts, logger) {
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
const srcRootPrefix = checkoutPath + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined;
@ -205,7 +205,7 @@ function resolveUriToFile(location, artifacts, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents, logger) {
function addFingerprints(sarifContents, checkoutPath, logger) {
var _a, _b;
let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
@ -221,7 +221,7 @@ function addFingerprints(sarifContents, logger) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, logger);
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, checkoutPath, logger);
if (!filepath) {
continue;
}
File diff suppressed because one or more lines are too long
11
lib/fingerprints.test.js
generated
@ -99,7 +99,7 @@ ava_1.default('hash', (t) => {
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts, logging_1.getRunnerLogger());
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger());
}
ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory
@ -110,7 +110,6 @@ ava_1.default('resolveUriToFile', t => {
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
const relativeFilepaht = filepath.substring(cwd.length + 1);
process.env['GITHUB_WORKSPACE'] = cwd;
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
@ -143,8 +142,8 @@ ava_1.default('addFingerprints', t => {
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, logging_1.getRunnerLogger()), expected);
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger()), expected);
});
ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file
@ -154,7 +153,7 @@ ava_1.default('missingRegions', t => {
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, logging_1.getRunnerLogger()), expected);
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger()), expected);
});
//# sourceMappingURL=fingerprints.test.js.map
File diff suppressed because one or more lines are too long
36
lib/init-action.js
generated
@ -8,13 +8,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const init_1 = require("./init");
const logging_1 = require("./logging");
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
@ -37,6 +32,7 @@ async function sendSuccessStatusReport(startedAt, config) {
}
async function run() {
const startedAt = new Date();
const logger = logging_1.getActionsLogger();
let config;
let codeql;
try {
@ -44,14 +40,8 @@ async function run() {
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig(util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), codeql);
analysisPaths.includeAndExcludeAnalysisPaths(config);
core.endGroup();
codeql = await init_1.initCodeQL(core.getInput('tools'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), 'actions', logger);
config = await init_1.initConfig(core.getInput('languages'), core.getInput('queries'), core.getInput('config-file'), util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), codeql, util.getRequiredEnvParam('GITHUB_WORKSPACE'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), logger);
}
catch (e) {
core.setFailed(e.message);
@ -60,7 +50,6 @@ async function run() {
return;
}
try {
const sourceRoot = path.resolve();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
@ -70,21 +59,8 @@ async function run() {
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
}
const tracerConfig = await tracer_config_1.getCombinedTracerConfig(config, codeql);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
if (process.platform === 'win32') {
await exec.exec('powershell', [
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } });
}
// NB: in CLI mode these will be output to a file rather than exported with core.exportVariable
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
}
}
@ -1 +1 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAC9C,mDAA0D;AAC1D,6CAA+B;AAkB/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,MAA0B;IAChF,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IAEzF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;IAC1E,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/E,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CAAC,yBAAyB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;IACnG,MAAM,OAAO,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEpF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS,EAAE,SAAS;QACpB,kBAAkB,EAAE,iBAAiB;QACrC,KAAK,EAAE,KAAK;QACZ,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO;KACjB,CAAC;IAEF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAEhB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YACxG,OAAO;SACR;QAED,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;QACtC,MAAM,GAAG,MAAM,oBAAW,EAAE,CAAC;QAC7B,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;QAC5B,IAAI,CAAC,QAAQ,EAAE,CAAC;QAEhB,IAAI,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;QAC/C,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACnC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,CAAC,CAAC;QACV,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;QACxG,OAAO;KACR;IAED,IAAI;QAEF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;QAElC,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CAAC,6GAA6G,CAAC,CAAC;SAC7H;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE9E,sEAAsE;QACtE,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACrC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;SACvG;QAED,MAAM,YAAY,GAAG,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnE,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,CAAC,IAAI,CACb,YAAY,EACZ;oBACE,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,mBAAmB,CAAC;oBACzD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC;iBAC7E,EACD,EAAE,GAAG,EAAE,EAAE,4BAA4B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;aACjE;YAED,+FAA+F;YAC/F,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC;SAC7F;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,
MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,CAAC,CAAC,CAAC;IAC3C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,iCAAyD;AACzD,uCAA6C;AAC7C,6CAA+B;AAkB/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,MAA0B;IAChF,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IAEzF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;IAC1E,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/E,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CAAC,yBAAyB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;IACnG,MAAM,OAAO,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEpF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS,EAAE,SAAS;QACpB,kBAAkB,EAAE,iBAAiB;QACrC,KAAK,EAAE,KAAK;QACZ,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO;KACjB,CAAC;IAEF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YACxG,OAAO;SACR;QAED,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,MAAM,CAAC,CAAC;QACV,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAC1B,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EACxB,IAAI,CAAC,QAAQ,CAAC,aAAa,CAAC,EAC5B,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,EACN,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EAC5C,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,MAAM,CAAC,CAAC;KAEX;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;QACxG,OAAO;KACR;IAED,IAAI;QAEF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CAAC,6GAA6G,CAAC,CAAC;SAC7H;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC;SAC7F;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,CAAC,CAAC,CAAC;IAC3C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
54
lib/init.js
generated
Normal file
54
lib/init.js
generated
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const util = __importStar(require("./util"));
|
||||
async function initCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
|
||||
logger.startGroup('Setup CodeQL tools');
|
||||
const codeql = await codeql_1.setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger);
|
||||
await codeql.printVersion();
|
||||
logger.endGroup();
|
||||
return codeql;
|
||||
}
|
||||
exports.initCodeQL = initCodeQL;
|
||||
async function initConfig(languagesInput, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
|
||||
logger.startGroup('Load language configuration');
|
||||
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
|
||||
analysisPaths.printPathFiltersWarning(config, logger);
|
||||
logger.endGroup();
|
||||
return config;
|
||||
}
|
||||
exports.initConfig = initConfig;
|
||||
async function runInit(codeql, config) {
|
||||
const sourceRoot = path.resolve();
|
||||
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
|
||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
||||
for (let language of config.languages) {
|
||||
// Init language database
|
||||
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
|
||||
}
|
||||
const tracerConfig = await tracer_config_1.getCombinedTracerConfig(config, codeql);
|
||||
if (tracerConfig !== undefined) {
|
||||
if (process.platform === 'win32') {
|
||||
await exec.exec('powershell', [
|
||||
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
|
||||
], { env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } });
|
||||
}
|
||||
}
|
||||
return tracerConfig;
|
||||
}
|
||||
exports.runInit = runInit;
|
||||
//# sourceMappingURL=init.js.map
|
||||
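Taken together, a minimal sketch of how these three helpers compose (this mirrors the runner's init command further down; the auth token, URLs and directories are placeholders):

// Hypothetical wiring of initCodeQL, initConfig and runInit; all literal values are placeholders.
const logger = getRunnerLogger();
const codeql = await initCodeQL(
    undefined,                      // codeqlURL: fall back to the default bundle download sources
    'username:token',               // githubAuth
    'https://github.example.com',   // githubUrl
    '/tmp/codeql-runner-temp',      // tempDir
    '/tmp/codeql-runner-tools',     // toolsDir
    'runner', logger);
const config = await initConfig(
    'javascript,python',            // languagesInput
    undefined, undefined,           // queriesInput, configFile
    '/tmp/codeql-runner-temp', '/tmp/codeql-runner-tools',
    codeql, process.cwd(),
    'username:token', 'https://github.example.com', logger);
const tracerConfig = await runInit(codeql, config);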
1
lib/init.js.map
Normal file
1
lib/init.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAE9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CAAC,CAAC;IACV,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CAAC,CAAC;IACV,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA3BD,gCA2BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACrC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;KACvG;IAED,MAAM,YAAY,GAAG,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,CAAC,IAAI,CACb,YAAY,EACZ;gBACE,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,mBAAmB,CAAC;gBACzD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC;aAC7E,EACD,EAAE,GAAG,EAAE,EAAE,4BAA4B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACjE;KACF;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AA3BD,0BA2BC"}
|
||||
111
lib/runner.js
generated
111
lib/runner.js
generated
|
|
@ -8,23 +8,30 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const commander_1 = require("commander");
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const autobuild_1 = require("./autobuild");
|
||||
const codeql_1 = require("./codeql");
|
||||
const init_1 = require("./init");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const program = new commander_1.Command();
|
||||
program.version('0.0.1');
|
||||
function parseGithubApiUrl(inputUrl) {
|
||||
function parseGithubUrl(inputUrl) {
|
||||
try {
|
||||
const url = new URL(inputUrl);
|
||||
// If we detect this is trying to point to github.com
|
||||
// then return with a fixed canonical URL.
|
||||
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
|
||||
return 'https://api.github.com';
|
||||
return 'https://github.com';
|
||||
}
|
||||
// Add the API path if it's not already present.
|
||||
if (url.pathname.indexOf('/api/v3') === -1) {
|
||||
url.pathname = path.join(url.pathname, 'api', 'v3');
|
||||
// Remove the API prefix if it's present
|
||||
if (url.pathname.indexOf('/api/v3') !== -1) {
|
||||
url.pathname = url.pathname.substring(0, url.pathname.indexOf('/api/v3'));
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
|
|
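For illustration, the reworked helper now normalises to the browser-facing URL instead of the API URL (the enterprise hostname below is assumed):

// Assumed examples of the new parseGithubUrl behaviour; not part of the diff.
parseGithubUrl('https://github.com');              // -> 'https://github.com'
parseGithubUrl('https://api.github.com');          // -> 'https://github.com'
parseGithubUrl('https://ghe.example.com/api/v3');  // -> 'https://ghe.example.com/' (the /api/v3 prefix is stripped)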
@ -32,7 +39,99 @@ function parseGithubApiUrl(inputUrl) {
|
|||
throw new Error(`"${inputUrl}" is not a valid URL`);
|
||||
}
|
||||
}
|
||||
function getTempDir(userInput) {
|
||||
const tempDir = path.join(userInput || os.tmpdir(), 'codeql-runner-temp');
|
||||
if (!fs.existsSync(tempDir)) {
|
||||
fs.mkdirSync(tempDir, { recursive: true });
|
||||
}
|
||||
return tempDir;
|
||||
}
|
||||
function getToolsDir(userInput, tmpDir) {
|
||||
const toolsDir = path.join(userInput || path.dirname(tmpDir), 'codeql-runner-tools');
|
||||
if (!fs.existsSync(toolsDir)) {
|
||||
fs.mkdirSync(toolsDir, { recursive: true });
|
||||
}
|
||||
return toolsDir;
|
||||
}
|
||||
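With neither --temp-dir nor --tools-dir given, the defaults resolve as follows (Linux paths, for illustration only):

// getTempDir(undefined)           -> '/tmp/codeql-runner-temp'   (os.tmpdir() + '/codeql-runner-temp')
// getToolsDir(undefined, tempDir) -> '/tmp/codeql-runner-tools'  (a sibling of the temp dir)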
const logger = logging_1.getRunnerLogger();
|
||||
program
|
||||
.command('init')
|
||||
.description('Initializes CodeQL')
|
||||
.requiredOption('--github-url <url>', 'URL of GitHub instance')
|
||||
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
|
||||
.option('--languages <languages>', 'Comma-separated list of languages to analyze. Defaults to trying to detect languages from the repo.')
|
||||
.option('--queries <queries>', 'Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file.')
|
||||
.option('--config-file <file>', 'Path to config file')
|
||||
.option('--codeql-path <path>', 'Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.')
|
||||
.option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
|
||||
.option('--tools-dir <dir>', 'Directory to use for CodeQL tools and other files to store between runs. Defaults to same as temp dir.')
|
||||
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
const tempDir = getTempDir(cmd.tempDir);
|
||||
const toolsDir = getToolsDir(cmd.toolsDir, tempDir);
|
||||
// Wipe the temp dir
|
||||
fs.rmdirSync(tempDir, { recursive: true });
|
||||
fs.mkdirSync(tempDir, { recursive: true });
|
||||
let codeql;
|
||||
if (cmd.codeqlPath !== undefined) {
|
||||
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
|
||||
}
|
||||
else {
|
||||
codeql = await init_1.initCodeQL(undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), tempDir, toolsDir, 'runner', logger);
|
||||
}
|
||||
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), cmd.githubAuth, parseGithubUrl(cmd.githubUrl), logger);
|
||||
await init_1.runInit(codeql, config);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Init failed');
|
||||
logger.error(e);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
program
|
||||
.command('autobuild')
|
||||
.description('Attempts to automatically build code')
|
||||
.requiredOption('--language <language>', 'The language to build')
|
||||
.option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
const language = languages_1.parseLanguage(cmd.language);
|
||||
if (language === undefined) {
|
||||
throw new Error(`"${cmd.language}" is not a recognised language`);
|
||||
}
|
||||
await autobuild_1.runAutobuild(language, getTempDir(cmd.tempDir), logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Autobuild failed');
|
||||
logger.error(e);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
program
|
||||
.command('analyze')
|
||||
.description('Finishes extracting code and runs CodeQL queries')
|
||||
.requiredOption('--repository <repository>', 'Repository name')
|
||||
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
|
||||
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
|
||||
.requiredOption('--github-url <url>', 'URL of GitHub instance')
|
||||
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
|
||||
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
|
||||
.option('--no-upload', 'Do not upload results after analysis', false)
|
||||
.option('--output-dir <dir>', 'Directory to output SARIF files to. By default will use temp directory.')
|
||||
.option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
const tempDir = getTempDir(cmd.tempDir);
|
||||
const outputDir = cmd.outputDir || path.join(tempDir, 'codeql-sarif');
|
||||
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, 'runner', outputDir, tempDir, logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Upload failed');
|
||||
logger.error(e);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
program
|
||||
.command('upload')
|
||||
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
|
||||
|
|
@ -45,7 +144,7 @@ program
|
|||
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubApiUrl(cmd.githubUrl), 'runner', logger);
|
||||
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), 'runner', logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Upload failed');
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
14
lib/upload-lib.js
generated
14
lib/upload-lib.js
generated
|
|
@ -43,7 +43,7 @@ function combineSarifFiles(sarifFiles) {
|
|||
exports.combineSarifFiles = combineSarifFiles;
|
||||
// Upload the given payload.
|
||||
// If the request fails then this will retry a small number of times.
|
||||
async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger) {
|
||||
async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger) {
|
||||
logger.info('Uploading results');
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env['TEST_MODE'] === 'true' || false;
|
||||
|
|
@ -55,7 +55,7 @@ async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, m
|
|||
// We don't want to back off too much to avoid wasting action
|
||||
// minutes, but just waiting a little bit could maybe help.
|
||||
const backoffPeriods = [1, 5, 15];
|
||||
const client = api.getApiClient(githubAuth, githubApiUrl);
|
||||
const client = api.getApiClient(githubAuth, githubUrl);
|
||||
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
|
||||
const reqURL = mode === 'actions'
|
||||
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
|
||||
|
|
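A minimal sketch of the retry shape those backoff periods imply (assumed; the real request call and payload handling live in uploadPayload):

const backoffPeriods = [1, 5, 15];  // seconds to wait before the 2nd, 3rd and 4th attempt
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    try {
        await client.request(reqURL, { data: payload });  // hypothetical call shape; see uploadPayload for the real one
        break;                                            // success: stop retrying
    } catch (e) {
        if (attempt === backoffPeriods.length) throw e;   // out of attempts
        await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
    }
}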
@ -100,7 +100,7 @@ async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, m
|
|||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
|
||||
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
|
||||
const sarifFiles = [];
|
||||
if (!fs.existsSync(sarifPath)) {
|
||||
throw new Error(`Path does not exist: ${sarifPath}`);
|
||||
|
|
@ -117,7 +117,7 @@ async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, ana
|
|||
else {
|
||||
sarifFiles.push(sarifPath);
|
||||
}
|
||||
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger);
|
||||
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
|
||||
}
|
||||
exports.upload = upload;
|
||||
// Counts the number of results in the given SARIF file
|
||||
|
|
@ -151,7 +151,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
|
|||
exports.validateSarifFileSchema = validateSarifFileSchema;
|
||||
// Uploads the given set of sarif files.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
|
||||
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
|
||||
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
if (mode === 'actions') {
|
||||
// This check only works on actions as env vars don't persist between calls to the runner
|
||||
|
|
@ -166,7 +166,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
|||
validateSarifFileSchema(file, logger);
|
||||
}
|
||||
let sarifPayload = combineSarifFiles(sarifFiles);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload, logger);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
|
||||
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
|
||||
let checkoutURI = file_url_1.default(checkoutPath);
|
||||
const toolNames = util.getToolNames(sarifPayload);
|
||||
|
|
@ -202,7 +202,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
|||
const numResultInSarif = countResultsInSarif(sarifPayload);
|
||||
logger.debug("Number of results in upload: " + numResultInSarif);
|
||||
// Make the upload
|
||||
await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
|
||||
await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
|
||||
return {
|
||||
raw_upload_size_bytes: rawUploadSizeBytes,
|
||||
zipped_upload_size_bytes: zippedUploadSizeBytes,
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
21
lib/util.js
generated
21
lib/util.js
generated
|
|
@ -14,25 +14,14 @@ const os = __importStar(require("os"));
|
|||
const path = __importStar(require("path"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
/**
|
||||
* The URL for github.com.
|
||||
*/
|
||||
exports.GITHUB_DOTCOM_URL = "https://github.com";
|
||||
/**
|
||||
* The API URL for github.com.
|
||||
*/
|
||||
exports.GITHUB_DOTCOM_API_URL = "https://api.github.com";
|
||||
/**
|
||||
* Get the API URL for the GitHub instance we are connected to.
|
||||
* May be for github.com or for an enterprise instance.
|
||||
*/
|
||||
function getInstanceAPIURL() {
|
||||
return process.env["GITHUB_API_URL"] || exports.GITHUB_DOTCOM_API_URL;
|
||||
}
|
||||
exports.getInstanceAPIURL = getInstanceAPIURL;
|
||||
/**
|
||||
* Are we running against a GitHub Enterprise instance, as opposed to github.com.
|
||||
*/
|
||||
function isEnterprise() {
|
||||
return getInstanceAPIURL() !== exports.GITHUB_DOTCOM_API_URL;
|
||||
}
|
||||
exports.isEnterprise = isEnterprise;
|
||||
/**
|
||||
* Get an environment parameter, but throw an error if it is not set.
|
||||
*/
|
||||
|
|
@ -245,7 +234,7 @@ exports.createStatusReportBase = createStatusReportBase;
|
|||
* Returns whether sending the status report was successful or not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport, ignoreFailures) {
|
||||
if (isEnterprise()) {
|
||||
if (getRequiredEnvParam("GITHUB_API_URL") !== exports.GITHUB_DOTCOM_API_URL) {
|
||||
core.debug("Not sending status report to GitHub Enterprise");
|
||||
return true;
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
|
|
@ -8,13 +8,25 @@
|
|||
|
||||
import javascript
|
||||
|
||||
/**
|
||||
* Although these libraries are designed for use on actions, they
|
||||
* have been deemed safe to use outside of actions as well.
|
||||
*/
|
||||
class SafeActionLibs extends string {
|
||||
SafeActionLibs() {
|
||||
this = "@actions/http-client" or
|
||||
this = "@actions/exec"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An import from a library that is meant for GitHub Actions and
|
||||
* we do not want to be using outside of actions.
|
||||
*/
|
||||
class ActionsLibImport extends ImportDeclaration {
|
||||
ActionsLibImport() {
|
||||
getImportedPath().getValue().matches("@actions/%")
|
||||
getImportedPath().getValue().matches("@actions/%") and
|
||||
not getImportedPath().getValue() instanceof SafeActionLibs
|
||||
}
|
||||
|
||||
string getName() {
|
||||
|
|
|
|||
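In practice the updated query changes how imports like these (illustrative TypeScript) are treated outside of the actions entry points:

import * as exec from '@actions/exec';         // no longer flagged: listed in SafeActionLibs
import * as http from '@actions/http-client';  // no longer flagged: listed in SafeActionLibs
import * as core from '@actions/core';         // still reported by ActionsLibImport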
30
runner/package-lock.json
generated
30
runner/package-lock.json
generated
|
|
@ -519,6 +519,16 @@
|
|||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"bindings": {
|
||||
"version": "1.5.0",
|
||||
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
|
||||
"integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"file-uri-to-path": "1.0.0"
|
||||
}
|
||||
},
|
||||
"bluebird": {
|
||||
"version": "3.7.2",
|
||||
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
|
||||
|
|
@ -1514,6 +1524,13 @@
|
|||
"integrity": "sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==",
|
||||
"dev": true
|
||||
},
|
||||
"file-uri-to-path": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz",
|
||||
"integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"fill-range": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
|
||||
|
|
@ -2555,6 +2572,13 @@
|
|||
"readable-stream": "^2.0.5"
|
||||
}
|
||||
},
|
||||
"nan": {
|
||||
"version": "2.14.1",
|
||||
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.1.tgz",
|
||||
"integrity": "sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"nanomatch": {
|
||||
"version": "1.2.13",
|
||||
"resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz",
|
||||
|
|
@ -4234,7 +4258,11 @@
|
|||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz",
|
||||
"integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"bindings": "^1.5.0",
|
||||
"nan": "^2.12.1"
|
||||
}
|
||||
},
|
||||
"glob-parent": {
|
||||
"version": "3.1.0",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import * as core from '@actions/core';
|
||||
|
||||
import * as configUtils from './config-utils';
|
||||
import { Logger } from './logging';
|
||||
|
||||
function isInterpretedLanguage(language): boolean {
|
||||
return language === 'javascript' || language === 'python';
|
||||
|
|
@ -22,6 +21,17 @@ function buildIncludeExcludeEnvVar(paths: string[]): string {
|
|||
return paths.join('\n');
|
||||
}
|
||||
|
||||
export function printPathFiltersWarning(config: configUtils.Config, logger: Logger) {
|
||||
// Index include/exclude/filters only work in javascript and python.
|
||||
// If any other languages are detected/configured then show a warning.
|
||||
if ((config.paths.length !== 0 ||
|
||||
config.pathsIgnore.length !== 0) &&
|
||||
!config.languages.every(isInterpretedLanguage)) {
|
||||
|
||||
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
||||
}
|
||||
}
|
||||
|
||||
export function includeAndExcludeAnalysisPaths(config: configUtils.Config) {
|
||||
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
|
||||
// control which files/directories are traversed when scanning.
|
||||
|
|
@ -31,10 +41,10 @@ export function includeAndExcludeAnalysisPaths(config: configUtils.Config) {
|
|||
// traverse the entire file tree to determine which files are matched.
|
||||
// Any paths containing "*" are not included in these.
|
||||
if (config.paths.length !== 0) {
|
||||
core.exportVariable('LGTM_INDEX_INCLUDE', buildIncludeExcludeEnvVar(config.paths));
|
||||
process.env['LGTM_INDEX_INCLUDE'] = buildIncludeExcludeEnvVar(config.paths);
|
||||
}
|
||||
if (config.pathsIgnore.length !== 0) {
|
||||
core.exportVariable('LGTM_INDEX_EXCLUDE', buildIncludeExcludeEnvVar(config.pathsIgnore));
|
||||
process.env['LGTM_INDEX_EXCLUDE'] = buildIncludeExcludeEnvVar(config.pathsIgnore);
|
||||
}
|
||||
|
||||
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
|
||||
|
|
@ -44,15 +54,6 @@ export function includeAndExcludeAnalysisPaths(config: configUtils.Config) {
|
|||
filters.push(...config.paths.map(p => 'include:' + p));
|
||||
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
|
||||
if (filters.length !== 0) {
|
||||
core.exportVariable('LGTM_INDEX_FILTERS', filters.join('\n'));
|
||||
}
|
||||
|
||||
// Index include/exclude/filters only work in javascript and python.
|
||||
// If any other languages are detected/configured then show a warning.
|
||||
if ((config.paths.length !== 0 ||
|
||||
config.pathsIgnore.length !== 0 ||
|
||||
filters.length !== 0) &&
|
||||
!config.languages.every(isInterpretedLanguage)) {
|
||||
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
||||
process.env['LGTM_INDEX_FILTERS'] = filters.join('\n');
|
||||
}
|
||||
}
|
||||
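As an illustration, for a config with paths: ['src'] and paths-ignore: ['src/vendor'] (made-up values), the rewritten function leaves the process environment as:

includeAndExcludeAnalysisPaths(config);
// process.env['LGTM_INDEX_INCLUDE'] === 'src'
// process.env['LGTM_INDEX_EXCLUDE'] === 'src/vendor'
// process.env['LGTM_INDEX_FILTERS'] === 'include:src\nexclude:src/vendor'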
|
|
|
|||
|
|
@ -1,174 +1,59 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import { getCodeQL } from './codeql';
|
||||
import * as configUtils from './config-utils';
|
||||
import { isScannedLanguage } from './languages';
|
||||
import { AnalysisStatusReport, runAnalyze } from './analyze';
|
||||
import { getActionsLogger } from './logging';
|
||||
import { parseRepositoryNwo } from './repository';
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as upload_lib from './upload-lib';
|
||||
import * as util from './util';
|
||||
|
||||
interface QueriesStatusReport {
|
||||
// Time taken in ms to analyze builtin queries for cpp (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_cpp_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for csharp (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_csharp_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for go (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_go_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for java (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_java_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for javascript (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_javascript_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for python (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_python_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for cpp (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_cpp_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for csharp (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_csharp_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for go (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_go_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for java (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_java_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for javascript (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_javascript_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for python (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_python_duration_ms?: number;
|
||||
// Name of language that errored during analysis (or undefined if no language failed)
|
||||
analyze_failure_language?: string;
|
||||
}
|
||||
|
||||
interface FinishStatusReport extends util.StatusReportBase, upload_lib.UploadStatusReport, QueriesStatusReport {}
|
||||
interface FinishStatusReport extends util.StatusReportBase, AnalysisStatusReport {}
|
||||
|
||||
async function sendStatusReport(
|
||||
startedAt: Date,
|
||||
queriesStats: QueriesStatusReport | undefined,
|
||||
uploadStats: upload_lib.UploadStatusReport | undefined,
|
||||
stats: AnalysisStatusReport | undefined,
|
||||
error?: Error) {
|
||||
|
||||
const status = queriesStats?.analyze_failure_language !== undefined || error !== undefined ? 'failure' : 'success';
|
||||
const status = stats?.analyze_failure_language !== undefined || error !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, error?.message, error?.stack);
|
||||
const statusReport: FinishStatusReport = {
|
||||
...statusReportBase,
|
||||
...(queriesStats || {}),
|
||||
...(uploadStats || {}),
|
||||
...(stats || {}),
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
|
||||
async function createdDBForScannedLanguages(config: configUtils.Config) {
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
if (isScannedLanguage(language)) {
|
||||
core.startGroup('Extracting ' + language);
|
||||
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function finalizeDatabaseCreation(config: configUtils.Config) {
|
||||
await createdDBForScannedLanguages(config);
|
||||
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
core.startGroup('Finalizing ' + language);
|
||||
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(
|
||||
sarifFolder: string,
|
||||
config: configUtils.Config): Promise<QueriesStatusReport> {
|
||||
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (let language of config.languages) {
|
||||
core.startGroup('Analyzing ' + language);
|
||||
|
||||
const queries = config.queries[language] || [];
|
||||
if (queries.length === 0) {
|
||||
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
|
||||
}
|
||||
|
||||
try {
|
||||
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = databasePath + '-queries.qls';
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
|
||||
|
||||
const sarifFile = path.join(sarifFolder, language + '.sarif');
|
||||
|
||||
await codeql.databaseAnalyze(databasePath, sarifFile, querySuite);
|
||||
|
||||
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
|
||||
} catch (e) {
|
||||
// For now the fields about query performance are not populated
|
||||
return {
|
||||
analyze_failure_language: language,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let queriesStats: QueriesStatusReport | undefined = undefined;
|
||||
let uploadStats: upload_lib.UploadStatusReport | undefined = undefined;
|
||||
let stats: AnalysisStatusReport | undefined = undefined;
|
||||
try {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
const config = await configUtils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
|
||||
|
||||
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
|
||||
const sarifFolder = core.getInput('output');
|
||||
fs.mkdirSync(sarifFolder, { recursive: true });
|
||||
|
||||
core.info('Finalizing database creation');
|
||||
await finalizeDatabaseCreation(config);
|
||||
|
||||
core.info('Analyzing database');
|
||||
queriesStats = await runQueries(sarifFolder, config);
|
||||
|
||||
if ('true' === core.getInput('upload')) {
|
||||
uploadStats = await upload_lib.upload(
|
||||
sarifFolder,
|
||||
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
|
||||
await util.getCommitOid(),
|
||||
util.getRef(),
|
||||
await util.getAnalysisKey(),
|
||||
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
|
||||
util.getWorkflowRunID(),
|
||||
core.getInput('checkout_path'),
|
||||
core.getInput('matrix'),
|
||||
core.getInput('token'),
|
||||
util.getRequiredEnvParam('GITHUB_API_URL'),
|
||||
'actions',
|
||||
getActionsLogger());
|
||||
}
|
||||
stats = await runAnalyze(
|
||||
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
|
||||
await util.getCommitOid(),
|
||||
util.getRef(),
|
||||
await util.getAnalysisKey(),
|
||||
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
|
||||
util.getWorkflowRunID(),
|
||||
core.getInput('checkout_path'),
|
||||
core.getInput('matrix'),
|
||||
core.getInput('token'),
|
||||
util.getRequiredEnvParam('GITHUB_API_URL'),
|
||||
core.getInput('upload') === 'true',
|
||||
'actions',
|
||||
core.getInput('output'),
|
||||
util.getRequiredEnvParam('RUNNER_TEMP'),
|
||||
getActionsLogger());
|
||||
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
console.log(error);
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
|
||||
await sendStatusReport(startedAt, stats, error);
|
||||
return;
|
||||
}
|
||||
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats);
|
||||
await sendStatusReport(startedAt, stats);
|
||||
}
|
||||
|
||||
run().catch(e => {
|
||||
|
|
|
|||
170
src/analyze.ts
Normal file
170
src/analyze.ts
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as analysisPaths from './analysis-paths';
|
||||
import { getCodeQL } from './codeql';
|
||||
import * as configUtils from './config-utils';
|
||||
import { isScannedLanguage } from './languages';
|
||||
import { Logger } from './logging';
|
||||
import { RepositoryNwo } from './repository';
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as upload_lib from './upload-lib';
|
||||
import * as util from './util';
|
||||
|
||||
export interface QueriesStatusReport {
|
||||
// Time taken in ms to analyze builtin queries for cpp (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_cpp_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for csharp (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_csharp_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for go (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_go_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for java (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_java_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for javascript (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_javascript_duration_ms?: number;
|
||||
// Time taken in ms to analyze builtin queries for python (or undefined if this language was not analyzed)
|
||||
analyze_builtin_queries_python_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for cpp (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_cpp_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for csharp (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_csharp_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for go (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_go_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for java (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_java_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for javascript (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_javascript_duration_ms?: number;
|
||||
// Time taken in ms to analyze custom queries for python (or undefined if this language was not analyzed)
|
||||
analyze_custom_queries_python_duration_ms?: number;
|
||||
// Name of language that errored during analysis (or undefined if no language failed)
|
||||
analyze_failure_language?: string;
|
||||
}
|
||||
|
||||
export interface AnalysisStatusReport extends upload_lib.UploadStatusReport, QueriesStatusReport {}
|
||||
|
||||
async function createdDBForScannedLanguages(
|
||||
config: configUtils.Config,
|
||||
logger: Logger) {
|
||||
|
||||
// Insert the LGTM_INDEX_X env vars at this point so they are set when
|
||||
// we extract any scanned languages.
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
if (isScannedLanguage(language)) {
|
||||
logger.startGroup('Extracting ' + language);
|
||||
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function finalizeDatabaseCreation(
|
||||
config: configUtils.Config,
|
||||
logger: Logger) {
|
||||
|
||||
await createdDBForScannedLanguages(config, logger);
|
||||
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (const language of config.languages) {
|
||||
logger.startGroup('Finalizing ' + language);
|
||||
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
|
||||
logger.endGroup();
|
||||
}
|
||||
}
|
||||
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(
|
||||
sarifFolder: string,
|
||||
config: configUtils.Config,
|
||||
logger: Logger): Promise<QueriesStatusReport> {
|
||||
|
||||
const codeql = getCodeQL(config.codeQLCmd);
|
||||
for (let language of config.languages) {
|
||||
logger.startGroup('Analyzing ' + language);
|
||||
|
||||
const queries = config.queries[language] || [];
|
||||
if (queries.length === 0) {
|
||||
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
|
||||
}
|
||||
|
||||
try {
|
||||
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = databasePath + '-queries.qls';
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
logger.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
|
||||
|
||||
const sarifFile = path.join(sarifFolder, language + '.sarif');
|
||||
|
||||
await codeql.databaseAnalyze(databasePath, sarifFile, querySuite);
|
||||
|
||||
logger.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
|
||||
logger.endGroup();
|
||||
|
||||
} catch (e) {
|
||||
// For now the fields about query performance are not populated
|
||||
return {
|
||||
analyze_failure_language: language,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
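For concreteness, a sketch of what the generated '-queries.qls' suite contains when two queries are selected (the query paths are made up):

const queries = ['/opt/codeql/qlpacks/javascript-queries/Xss.ql', '/home/runner/custom/MyQuery.ql'];
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
// querySuiteContents is plain YAML:
//   - query: /opt/codeql/qlpacks/javascript-queries/Xss.ql
//   - query: /home/runner/custom/MyQuery.ql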
|
||||
export async function runAnalyze(
|
||||
repositoryNwo: RepositoryNwo,
|
||||
commitOid: string,
|
||||
ref: string,
|
||||
analysisKey: string | undefined,
|
||||
analysisName: string | undefined,
|
||||
workflowRunID: number | undefined,
|
||||
checkoutPath: string,
|
||||
environment: string | undefined,
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
doUpload: boolean,
|
||||
mode: util.Mode,
|
||||
outputDir: string,
|
||||
tempDir: string,
|
||||
logger: Logger): Promise<AnalysisStatusReport> {
|
||||
|
||||
const config = await configUtils.getConfig(tempDir, logger);
|
||||
|
||||
// Delete the tracer config env var to avoid tracing ourselves
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
|
||||
fs.mkdirSync(outputDir, { recursive: true });
|
||||
|
||||
logger.info('Finalizing database creation');
|
||||
await finalizeDatabaseCreation(config, logger);
|
||||
|
||||
logger.info('Analyzing database');
|
||||
const queriesStats = await runQueries(outputDir, config, logger);
|
||||
|
||||
if (!doUpload) {
|
||||
logger.info('Not uploading results');
|
||||
return { ...queriesStats };
|
||||
}
|
||||
|
||||
const uploadStats = await upload_lib.upload(
|
||||
outputDir,
|
||||
repositoryNwo,
|
||||
commitOid,
|
||||
ref,
|
||||
analysisKey,
|
||||
analysisName,
|
||||
workflowRunID,
|
||||
checkoutPath,
|
||||
environment,
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
mode,
|
||||
logger);
|
||||
|
||||
return { ...queriesStats, ...uploadStats };
|
||||
}
|
||||
|
|
@ -1,22 +1,37 @@
|
|||
import * as core from "@actions/core";
|
||||
import * as github from "@actions/github";
|
||||
import consoleLogLevel from "console-log-level";
|
||||
import * as path from 'path';
|
||||
|
||||
import { getRequiredEnvParam, isLocalRun } from "./util";
|
||||
|
||||
export const getApiClient = function(githubAuth: string, githubApiUrl: string, allowLocalRun = false) {
|
||||
export const getApiClient = function(githubAuth: string, githubUrl: string, allowLocalRun = false) {
|
||||
if (isLocalRun() && !allowLocalRun) {
|
||||
throw new Error('Invalid API call in local run');
|
||||
}
|
||||
return new github.GitHub(
|
||||
{
|
||||
auth: parseAuth(githubAuth),
|
||||
baseUrl: githubApiUrl,
|
||||
baseUrl: getApiUrl(githubUrl),
|
||||
userAgent: "CodeQL Action",
|
||||
log: consoleLogLevel({ level: "debug" })
|
||||
});
|
||||
};
|
||||
|
||||
function getApiUrl(githubUrl: string): string {
|
||||
const url = new URL(githubUrl);
|
||||
|
||||
// If we detect this is trying to point to github.com
|
||||
// then return with a fixed canonical URL.
|
||||
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
|
||||
return 'https://api.github.com';
|
||||
}
|
||||
|
||||
// Add the /api/v3 API prefix
|
||||
url.pathname = path.join(url.pathname, 'api', 'v3');
|
||||
return url.toString();
|
||||
}
|
||||
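Illustrative results of the new getApiUrl helper (the enterprise hostname is assumed):

getApiUrl('https://github.com');       // -> 'https://api.github.com'
getApiUrl('https://api.github.com');   // -> 'https://api.github.com'
getApiUrl('https://ghe.example.com');  // -> 'https://ghe.example.com/api/v3'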
|
||||
// Parses the user input as either a single token,
|
||||
// or a username and password / PAT.
|
||||
function parseAuth(auth: string): string {
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import * as core from '@actions/core';
|
|||
import { getCodeQL } from './codeql';
|
||||
import * as config_utils from './config-utils';
|
||||
import { isTracedLanguage } from './languages';
|
||||
import { getActionsLogger } from './logging';
|
||||
import * as util from './util';
|
||||
|
||||
interface AutobuildStatusReport extends util.StatusReportBase {
|
||||
|
|
@ -34,6 +35,7 @@ async function sendCompletedStatusReport(
|
|||
}
|
||||
|
||||
async function run() {
|
||||
const logger = getActionsLogger();
|
||||
const startedAt = new Date();
|
||||
let language;
|
||||
try {
|
||||
|
|
@ -42,7 +44,7 @@ async function run() {
|
|||
return;
|
||||
}
|
||||
|
||||
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
|
||||
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
|
||||
|
||||
// Attempt to find a language to autobuild
|
||||
// We want to pick the dominant language in the repo from the ones we're able to build
|
||||
|
|
|
|||
21
src/autobuild.ts
Normal file
21
src/autobuild.ts
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
import { getCodeQL } from './codeql';
import * as config_utils from './config-utils';
import { isTracedLanguage, Language } from './languages';
import { Logger } from './logging';

export async function runAutobuild(
language: Language,
tmpDir: string,
logger: Logger) {

if (!isTracedLanguage(language)) {
throw new Error(`Cannot build "${language}" as it is not a traced language`);
}

const config = await config_utils.getConfig(tmpDir, logger);

logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
logger.endGroup();
}
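A hypothetical caller, mirroring the runner's autobuild command (the temp dir is a placeholder):

import { runAutobuild } from './autobuild';
import { parseLanguage } from './languages';
import { getRunnerLogger } from './logging';

const language = parseLanguage('java');
if (language === undefined) {
    throw new Error('"java" is not a recognised language');
}
await runAutobuild(language, '/tmp/codeql-runner-temp', getRunnerLogger());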
|
|
@ -4,6 +4,7 @@ import nock from 'nock';
|
|||
import * as path from 'path';
|
||||
|
||||
import * as codeql from './codeql';
|
||||
import { getRunnerLogger } from './logging';
|
||||
import {setupTests} from './testing-utils';
|
||||
import * as util from './util';
|
||||
|
||||
|
|
@ -12,12 +13,6 @@ setupTests(test);
|
|||
test('download codeql bundle cache', async t => {
|
||||
|
||||
await util.withTmpDir(async tmpDir => {
|
||||
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
|
||||
process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');
|
||||
|
||||
const versions = ['20200601', '20200610'];
|
||||
|
||||
for (let i = 0; i < versions.length; i++) {
|
||||
|
|
@ -27,10 +22,14 @@ test('download codeql bundle cache', async t => {
|
|||
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
|
||||
|
||||
|
||||
process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;
|
||||
|
||||
await codeql.setupCodeQL();
|
||||
await codeql.setupCodeQL(
|
||||
`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
'runner',
|
||||
getRunnerLogger());
|
||||
|
||||
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
|
||||
}
|
||||
|
|
@ -56,7 +55,7 @@ test('parse codeql bundle url version', t => {
|
|||
const url = `https://github.com/.../codeql-bundle-${version}/...`;
|
||||
|
||||
try {
|
||||
const parsedVersion = codeql.getCodeQLURLVersion(url);
|
||||
const parsedVersion = codeql.getCodeQLURLVersion(url, getRunnerLogger());
|
||||
t.deepEqual(parsedVersion, expectedVersion);
|
||||
} catch (e) {
|
||||
t.fail(e.message);
|
||||
|
|
|
|||
107
src/codeql.ts
107
src/codeql.ts
|
|
@ -1,4 +1,3 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as http from '@actions/http-client';
|
||||
import { IHeaders } from '@actions/http-client/interfaces';
|
||||
|
|
@ -13,6 +12,7 @@ import uuidV4 from 'uuid/v4';
|
|||
import * as api from './api-client';
|
||||
import * as defaults from './defaults.json'; // Referenced from codeql-action-sync-tool!
|
||||
import { Language } from './languages';
|
||||
import { Logger } from './logging';
|
||||
import * as util from './util';
|
||||
|
||||
type Options = (string|number|boolean)[];
|
||||
|
|
@ -101,31 +101,36 @@ const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
|||
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
|
||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
|
||||
function getCodeQLActionRepository(): string {
|
||||
// Actions do not know their own repository name,
|
||||
// so we currently use this hack to find the name based on where our files are.
|
||||
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
|
||||
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
|
||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||
const relativeScriptPath = path.relative(actionsDirectory, __filename);
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
|
||||
function getCodeQLActionRepository(mode: util.Mode): string {
|
||||
if (mode === 'actions') {
|
||||
// Actions do not know their own repository name,
|
||||
// so we currently use this hack to find the name based on where our files are.
|
||||
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
|
||||
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
|
||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||
const relativeScriptPath = path.relative(actionsDirectory, __filename);
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
|
||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
|
||||
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
|
||||
|
||||
} else {
|
||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
|
||||
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
|
||||
}
|
||||
|
||||
async function getCodeQLBundleDownloadURL(): Promise<string> {
|
||||
const codeQLActionRepository = getCodeQLActionRepository();
|
||||
async function getCodeQLBundleDownloadURL(githubUrl: string, mode: util.Mode, logger: Logger): Promise<string> {
|
||||
const codeQLActionRepository = getCodeQLActionRepository(mode);
|
||||
const potentialDownloadSources = [
|
||||
// This GitHub instance, and this Action.
|
||||
[util.getInstanceAPIURL(), codeQLActionRepository],
|
||||
[githubUrl, codeQLActionRepository],
|
||||
// This GitHub instance, and the canonical Action.
|
||||
[util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
[githubUrl, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
// GitHub.com, and the canonical Action.
|
||||
[util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
];
|
||||
// We now filter out any duplicates.
|
||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
||||
|
|
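To make the fallback order concrete: for an assumed enterprise instance and a forked copy of the Action, the de-duplicated candidate sources are tried as follows:

// githubUrl = 'https://ghe.example.com', action repository = 'myorg/codeql-action' (both assumed)
// 1. ['https://ghe.example.com', 'myorg/codeql-action']   // this instance, this Action
// 2. ['https://ghe.example.com', 'github/codeql-action']  // this instance, the canonical Action
// 3. ['https://github.com', 'github/codeql-action']       // github.com, the canonical Action (no API check needed)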
@ -133,7 +138,7 @@ async function getCodeQLBundleDownloadURL(): Promise<string> {
|
|||
for (let downloadSource of uniqueDownloadSources) {
|
||||
let [apiURL, repository] = downloadSource;
|
||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
||||
if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
if (apiURL === util.GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
break;
|
||||
}
|
||||
let [repositoryOwner, repositoryName] = repository.split("/");
|
||||
|
|
@@ -145,29 +150,29 @@ async function getCodeQLBundleDownloadURL(): Promise<string> {
});
for (let asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
}
} catch (e) {
core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}

// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url: string, headers?: IHeaders): Promise<string> {
async function toolcacheDownloadTool(
url: string,
headers: IHeaders | undefined,
tempDir: string,
logger: Logger): Promise<string> {

const client = new http.HttpClient('CodeQL Action');
const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), uuidV4());
const dest = path.join(tempDir, uuidV4());
const response: http.HttpClientResponse = await client.get(url, headers);
if (response.message.statusCode !== 200) {
const err = new toolcache.HTTPError(response.message.statusCode);
core.info(
`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`
);
throw err;
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
}
const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true });
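The piping of the HTTP response into the destination file falls between this hunk and the next; a plausible sketch of that step, reusing the `pipeline` and `dest` values defined above, is:

// Hedged sketch of the step the diff skips over: stream the response body to disk.
await pipeline(response.message, fs.createWriteStream(dest));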
@@ -175,32 +180,44 @@ async function toolcacheDownloadTool(url: string, headers?: IHeaders): Promise<s
return dest;
}

export async function setupCodeQL(): Promise<CodeQL> {
export async function setupCodeQL(
codeqlURL: string | undefined,
githubAuth: string,
githubUrl: string,
tempDir: string,
toolsDir: string,
mode: util.Mode,
logger: Logger): Promise<CodeQL> {

// Setting these two env vars makes the toolcache code safe to use,
// but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env['RUNNER_TEMP'] = tempDir;
process.env['RUNNER_TOOL_CACHE'] = toolsDir;

try {
let codeqlURL = core.getInput('tools');
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);

let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
} else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL();
codeqlURL = await getCodeQLBundleDownloadURL(githubUrl, mode, logger);
}

const headers: IHeaders = {accept: 'application/octet-stream'};
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
core.debug('Downloading CodeQL bundle with token.');
let token = core.getInput('token', { required: true });
headers.authorization = `token ${token}`;
if (codeqlURL.startsWith(githubUrl + "/")) {
logger.debug('Downloading CodeQL bundle with token.');
headers.authorization = `token ${githubAuth}`;
} else {
core.debug('Downloading CodeQL bundle without token.');
logger.debug('Downloading CodeQL bundle without token.');
}
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);

const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
@@ -217,12 +234,12 @@ export async function setupCodeQL(): Promise<CodeQL> {
return cachedCodeQL;

} catch (e) {
core.error(e);
logger.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
}
}

export function getCodeQLURLVersion(url: string): string {
export function getCodeQLURLVersion(url: string, logger: Logger): string {

const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
@@ -232,7 +249,7 @@ export function getCodeQLURLVersion(url: string): string {
let version = match[1];

if (!semver.valid(version)) {
core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
}

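A worked example of the version parsing above, using a hypothetical bundle URL built from the same release-URL pattern as the fallback shown earlier:

// getCodeQLURLVersion('https://github.com/github/codeql-action/releases/download/codeql-bundle-20200601/codeql-bundle.tar.gz', logger)
// captures '20200601', which is not valid SemVer, so the function returns '0.0.0-20200601'.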
@@ -8,22 +8,12 @@ import * as api from './api-client';
import { getCachedCodeQL, setCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import {setupTests} from './testing-utils';
import * as util from './util';

setupTests(test);

function setInput(name: string, value: string | undefined) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
} else {
delete process.env[envVar];
}
}
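The `INPUT_` transformation in the helper above mirrors how `@actions/core` reads workflow inputs; for illustration, a hypothetical call `setInput('config-file', 'input')` amounts to:

// Equivalent effect of the hypothetical call above:
process.env['INPUT_CONFIG-FILE'] = 'input';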
type GetContentsResponse = { content?: string; } | {}[];

function mockGetContents(content: GetContentsResponse): sinon.SinonStub<any, any> {
@ -52,11 +42,8 @@ function mockListLanguages(languages: string[]) {
|
|||
|
||||
test("load empty config", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
setInput('config-file', undefined);
|
||||
setInput('languages', 'javascript,python');
|
||||
const logger = getRunnerLogger();
|
||||
const languages = 'javascript,python';
|
||||
|
||||
const codeQL = setCodeQL({
|
||||
resolveQueries: async function() {
|
||||
|
|
@ -68,19 +55,34 @@ test("load empty config", async t => {
|
|||
},
|
||||
});
|
||||
|
||||
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const config = await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
logger);
|
||||
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(tmpDir, tmpDir, codeQL));
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(
|
||||
languages,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
logger));
|
||||
});
|
||||
});
|
||||
|
||||
test("loading config saves config", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
setInput('config-file', undefined);
|
||||
setInput('languages', 'javascript,python');
|
||||
const logger = getRunnerLogger();
|
||||
|
||||
const codeQL = setCodeQL({
|
||||
resolveQueries: async function() {
|
||||
|
|
@ -97,28 +99,43 @@ test("loading config saves config", async t => {
|
|||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
|
||||
// Sanity check that getConfig throws before we have called initConfig
|
||||
await t.throwsAsync(() => configUtils.getConfig(tmpDir));
|
||||
await t.throwsAsync(() => configUtils.getConfig(tmpDir, logger));
|
||||
|
||||
const config1 = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const config1 = await configUtils.initConfig(
|
||||
'javascript,python',
|
||||
undefined,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
logger);
|
||||
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
const config2 = await configUtils.getConfig(tmpDir);
|
||||
const config2 = await configUtils.getConfig(tmpDir, logger);
|
||||
t.deepEqual(config1, config2);
|
||||
});
|
||||
});
|
||||
|
||||
test("load input outside of workspace", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
setInput('config-file', '../input');
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
undefined,
|
||||
undefined,
|
||||
'../input',
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
|
||||
|
|
@ -128,14 +145,21 @@ test("load input outside of workspace", async t => {
|
|||
|
||||
test("load non-local input with invalid repo syntax", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
// no filename given, just a repo
|
||||
setInput('config-file', 'octo-org/codeql-config@main');
|
||||
const configFile = 'octo-org/codeql-config@main';
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
undefined,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
|
||||
|
|
@ -145,15 +169,22 @@ test("load non-local input with invalid repo syntax", async t => {
|
|||
|
||||
test("load non-existent input", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
t.false(fs.existsSync(path.join(tmpDir, 'input')));
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
const configFile = 'input';
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
|
||||
|
|
@ -163,9 +194,6 @@ test("load non-existent input", async t => {
|
|||
|
||||
test("load non-empty input", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const codeQL = setCodeQL({
|
||||
resolveQueries: async function() {
|
||||
return {
|
||||
|
|
@ -213,11 +241,21 @@ test("load non-empty input", async t => {
|
|||
codeQLCmd: codeQL.getPath(),
|
||||
};
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
const configFile = 'input';
|
||||
fs.writeFileSync(path.join(tmpDir, configFile), inputFileContents, 'utf8');
|
||||
|
||||
const actualConfig = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const actualConfig = await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
|
|
@ -226,9 +264,6 @@ test("load non-empty input", async t => {
|
|||
|
||||
test("default queries are used", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
// Check that the default behaviour is to add the default queries.
|
||||
// In this case if a config file is specified but does not include
|
||||
// the disable-default-queries field.
|
||||
|
|
@ -260,11 +295,21 @@ test("default queries are used", async t => {
|
|||
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
const configFile = 'input';
|
||||
fs.writeFileSync(path.join(tmpDir, configFile), inputFileContents, 'utf8');
|
||||
|
||||
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
|
|
@ -275,16 +320,13 @@ test("default queries are used", async t => {
|
|||
|
||||
test("Queries can be specified in config file", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
queries:
|
||||
- uses: ./foo`;
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
const configFile = path.join(tmpDir, 'input');
|
||||
fs.writeFileSync(configFile, inputFileContents, 'utf8');
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
|
||||
|
|
@ -307,9 +349,19 @@ test("Queries can be specified in config file", async t => {
|
|||
},
|
||||
});
|
||||
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
|
||||
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const config = await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
|
|
@ -327,19 +379,16 @@ test("Queries can be specified in config file", async t => {
|
|||
|
||||
test("Queries from config file can be overridden in workflow file", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
queries:
|
||||
- uses: ./foo`;
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
const configFile = path.join(tmpDir, 'input');
|
||||
fs.writeFileSync(configFile, inputFileContents, 'utf8');
|
||||
|
||||
// This config item should take precedence over the config file but shouldn't affect the default queries.
|
||||
setInput('queries', './override');
|
||||
const queries = './override';
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
fs.mkdirSync(path.join(tmpDir, 'override'));
|
||||
|
|
@ -363,9 +412,19 @@ test("Queries from config file can be overridden in workflow file", async t => {
|
|||
},
|
||||
});
|
||||
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
|
||||
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const config = await configUtils.initConfig(
|
||||
languages,
|
||||
queries,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
|
|
@ -383,13 +442,10 @@ test("Queries from config file can be overridden in workflow file", async t => {
|
|||
|
||||
test("Multiple queries can be specified in workflow file, no config file required", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
fs.mkdirSync(path.join(tmpDir, 'override1'));
|
||||
fs.mkdirSync(path.join(tmpDir, 'override2'));
|
||||
|
||||
setInput('queries', './override1,./override2');
|
||||
const queries = './override1,./override2';
|
||||
|
||||
const resolveQueriesArgs: {queries: string[], extraSearchPath: string | undefined}[] = [];
|
||||
const codeQL = setCodeQL({
|
||||
|
|
@ -410,9 +466,19 @@ test("Multiple queries can be specified in workflow file, no config file require
|
|||
},
|
||||
});
|
||||
|
||||
setInput('languages', 'javascript');
|
||||
const languages = 'javascript';
|
||||
|
||||
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
const config = await configUtils.initConfig(
|
||||
languages,
|
||||
queries,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
|
|
@ -433,11 +499,8 @@ test("Multiple queries can be specified in workflow file, no config file require
|
|||
|
||||
test("Invalid queries in workflow file handled correctly", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
setInput('queries', 'foo/bar@v1@v3');
|
||||
setInput('languages', 'javascript');
|
||||
const queries = 'foo/bar@v1@v3';
|
||||
const languages = 'javascript';
|
||||
|
||||
// This function just needs to be type-correct; it doesn't need to do anything,
|
||||
// since we're deliberately passing in invalid data
|
||||
|
|
@ -454,7 +517,17 @@ test("Invalid queries in workflow file handled correctly", async t => {
|
|||
});
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
queries,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
t.fail('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
|
||||
|
|
@ -464,9 +537,6 @@ test("Invalid queries in workflow file handled correctly", async t => {
|
|||
|
||||
test("API client used when reading remote config", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const codeQL = setCodeQL({
|
||||
resolveQueries: async function() {
|
||||
return {
|
||||
|
|
@ -501,26 +571,42 @@ test("API client used when reading remote config", async t => {
|
|||
// Create checkout directory for remote queries repository
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo/bar/dev'), { recursive: true });
|
||||
|
||||
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
|
||||
setInput('languages', 'javascript');
|
||||
const configFile = 'octo-org/codeql-config/config.yaml@main';
|
||||
const languages = 'javascript';
|
||||
|
||||
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
|
||||
test("Remote config handles the case where a directory is provided", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const dummyResponse = []; // directories are returned as arrays
|
||||
mockGetContents(dummyResponse);
|
||||
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
undefined,
|
||||
undefined,
|
||||
repoReference,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
|
||||
|
|
@ -530,18 +616,24 @@ test("Remote config handles the case where a directory is provided", async t =>
|
|||
|
||||
test("Invalid format of remote config handled correctly", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const dummyResponse = {
|
||||
// note no "content" property here
|
||||
};
|
||||
mockGetContents(dummyResponse);
|
||||
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
undefined,
|
||||
undefined,
|
||||
repoReference,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
|
||||
|
|
@ -551,13 +643,20 @@ test("Invalid format of remote config handled correctly", async t => {
|
|||
|
||||
test("No detected languages", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
mockListLanguages([]);
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
|
||||
|
|
@ -567,13 +666,20 @@ test("No detected languages", async t => {
|
|||
|
||||
test("Unknown languages", async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
setInput('languages', 'ruby,english');
|
||||
const languages = 'ruby,english';
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, getCachedCodeQL());
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
undefined,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
getCachedCodeQL(),
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
|
||||
|
|
@ -588,9 +694,6 @@ function doInvalidInputTest(
|
|||
|
||||
test("load invalid input - " + testName, async t => {
|
||||
return await util.withTmpDir(async tmpDir => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
|
||||
const codeQL = setCodeQL({
|
||||
resolveQueries: async function() {
|
||||
return {
|
||||
|
|
@ -601,13 +704,23 @@ function doInvalidInputTest(
|
|||
},
|
||||
});
|
||||
|
||||
const inputFile = path.join(tmpDir, 'input');
|
||||
const languages = 'javascript';
|
||||
const configFile = 'input';
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
|
||||
await configUtils.initConfig(
|
||||
languages,
|
||||
undefined,
|
||||
configFile,
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
'token',
|
||||
'https://github.example.com',
|
||||
getRunnerLogger());
|
||||
throw new Error('initConfig did not throw error');
|
||||
} catch (err) {
|
||||
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
|
||||
|
|
@@ -715,10 +828,10 @@ test('path validations', t => {
const configFile = './.github/codeql/config.yml';

for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile));
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, getRunnerLogger()));
}
for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile));
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, getRunnerLogger()));
}
});

@@ -729,11 +842,11 @@ test('path sanitisation', t => {

// Valid paths are not modified
t.deepEqual(
configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile),
configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile, getRunnerLogger()),
'foo/bar');

// Trailing stars are stripped
t.deepEqual(
configUtils.validateAndSanitisePath('foo/**', propertyName, configFile),
configUtils.validateAndSanitisePath('foo/**', propertyName, configFile, getRunnerLogger()),
'foo/');
});

@@ -1,4 +1,3 @@
import * as core from '@actions/core';
import * as fs from 'fs';
import * as yaml from 'js-yaml';
import * as path from 'path';
@@ -7,7 +6,7 @@ import * as api from './api-client';
import { CodeQL, ResolveQueriesOutput } from './codeql';
import * as externalQueries from "./external-queries";
import { Language, parseLanguage } from "./languages";
import * as util from './util';
import { Logger } from './logging';

// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
@@ -182,12 +181,12 @@ async function addLocalQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
localQueryPath: string,
checkoutPath: string,
configFile?: string) {

// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);

// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
@@ -198,14 +197,11 @@ async function addLocalQueries(
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);

// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}

// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');

await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], rootOfRepo, true);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath, true);
}

/**
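To make the escape check above concrete, with assumed values that are not part of the diff:

// checkoutPath      = '/home/runner/work/repo'
// localQueryPath    = '../outside/queries'
// absoluteQueryPath = path.join(checkoutPath, localQueryPath)  // '/home/runner/work/outside/queries'
// The prefix test against checkoutPath + path.sep fails, so getLocalPathOutsideOfRepository is thrown.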
@ -216,6 +212,7 @@ async function addRemoteQueries(
|
|||
resultMap: { [language: string]: string[] },
|
||||
queryUses: string,
|
||||
tempDir: string,
|
||||
githubUrl: string,
|
||||
configFile?: string) {
|
||||
|
||||
let tok = queryUses.split('@');
|
||||
|
|
@ -239,13 +236,17 @@ async function addRemoteQueries(
|
|||
const nwo = tok[0] + '/' + tok[1];
|
||||
|
||||
// Checkout the external repository
|
||||
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref, tempDir);
|
||||
const checkoutPath = await externalQueries.checkoutExternalRepository(
|
||||
nwo,
|
||||
ref,
|
||||
githubUrl,
|
||||
tempDir);
|
||||
|
||||
const queryPath = tok.length > 2
|
||||
? path.join(rootOfRepo, tok.slice(2).join('/'))
|
||||
: rootOfRepo;
|
||||
? path.join(checkoutPath, tok.slice(2).join('/'))
|
||||
: checkoutPath;
|
||||
|
||||
await runResolveQueries(codeQL, resultMap, [queryPath], rootOfRepo, true);
|
||||
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -262,6 +263,8 @@ async function parseQueryUses(
|
|||
resultMap: { [language: string]: string[] },
|
||||
queryUses: string,
|
||||
tempDir: string,
|
||||
checkoutPath: string,
|
||||
githubUrl: string,
|
||||
configFile?: string) {
|
||||
|
||||
queryUses = queryUses.trim();
|
||||
|
|
@ -271,7 +274,7 @@ async function parseQueryUses(
|
|||
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), configFile);
|
||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -282,7 +285,7 @@ async function parseQueryUses(
|
|||
}
|
||||
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, configFile);
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, configFile);
|
||||
}
|
||||
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
|
|
@ -299,7 +302,8 @@ const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
|
|||
export function validateAndSanitisePath(
|
||||
originalPath: string,
|
||||
propertyName: string,
|
||||
configFile: string): string {
|
||||
configFile: string,
|
||||
logger: Logger): string {
|
||||
|
||||
// Take a copy so we don't modify the original path, so we can still construct error messages
|
||||
let path = originalPath;
|
||||
|
|
@ -335,7 +339,7 @@ export function validateAndSanitisePath(
|
|||
// Check for other regex characters that we don't support.
|
||||
// Output a warning so the user knows, but otherwise continue normally.
|
||||
if (path.match(filterPatternCharactersRegex)) {
|
||||
core.warning(getConfigFilePropertyError(
|
||||
logger.warning(getConfigFilePropertyError(
|
||||
configFile,
|
||||
propertyName,
|
||||
'"' + originalPath + '" contains an unsupported character. ' +
|
||||
|
|
@ -445,19 +449,23 @@ export function getUnknownLanguagesError(languages: string[]): string {
|
|||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
*/
|
||||
async function getLanguagesInRepo(): Promise<Language[]> {
|
||||
async function getLanguagesInRepo(
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
logger: Logger): Promise<Language[]> {
|
||||
|
||||
let repo_nwo = process.env['GITHUB_REPOSITORY']?.split("/");
|
||||
if (repo_nwo) {
|
||||
let owner = repo_nwo[0];
|
||||
let repo = repo_nwo[1];
|
||||
|
||||
core.debug(`GitHub repo ${owner} ${repo}`);
|
||||
const response = await api.getActionsApiClient(true).repos.listLanguages({
|
||||
logger.debug(`GitHub repo ${owner} ${repo}`);
|
||||
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.listLanguages({
|
||||
owner,
|
||||
repo
|
||||
});
|
||||
|
||||
core.debug("Languages API response: " + JSON.stringify(response));
|
||||
logger.debug("Languages API response: " + JSON.stringify(response));
|
||||
|
||||
// The GitHub API is going to return languages in order of popularity,
|
||||
// When we pick a language to autobuild we want to pick the most popular traced language
|
||||
|
|
@ -486,19 +494,26 @@ async function getLanguagesInRepo(): Promise<Language[]> {
|
|||
* If no languages could be detected from either the workflow or the repository
|
||||
* then throw an error.
|
||||
*/
|
||||
async function getLanguages(): Promise<Language[]> {
|
||||
async function getLanguages(
|
||||
languagesInput: string | undefined,
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
logger: Logger): Promise<Language[]> {
|
||||
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = core.getInput('languages', { required: false })
|
||||
let languages = (languagesInput || "")
|
||||
.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
core.info("Languages from configuration: " + JSON.stringify(languages));
|
||||
logger.info("Languages from configuration: " + JSON.stringify(languages));
|
||||
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo();
|
||||
core.info("Automatically detected languages: " + JSON.stringify(languages));
|
||||
languages = await getLanguagesInRepo(
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
logger);
|
||||
logger.info("Automatically detected languages: " + JSON.stringify(languages));
|
||||
}
|
||||
|
||||
// If the languages parameter was not given and no languages were
|
||||
|
|
@ -529,31 +544,44 @@ async function getLanguages(): Promise<Language[]> {
|
|||
* Returns true if queries were provided in the workflow file
|
||||
* (and thus added), otherwise false
|
||||
*/
|
||||
async function addQueriesFromWorkflowIfRequired(
|
||||
async function addQueriesFromWorkflow(
|
||||
codeQL: CodeQL,
|
||||
queriesInput: string,
|
||||
languages: string[],
|
||||
resultMap: { [language: string]: string[] },
|
||||
tempDir: string
|
||||
): Promise<boolean> {
|
||||
const queryUses = core.getInput('queries');
|
||||
if (queryUses) {
|
||||
for (const query of queryUses.split(',')) {
|
||||
await parseQueryUses(languages, codeQL, resultMap, query, tempDir);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
tempDir: string,
|
||||
checkoutPath: string,
|
||||
githubUrl: string) {
|
||||
|
||||
return false;
|
||||
for (const query of queriesInput.split(',')) {
|
||||
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
export async function getDefaultConfig(tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
|
||||
const languages = await getLanguages();
|
||||
export async function getDefaultConfig(
|
||||
languagesInput: string | undefined,
|
||||
queriesInput: string | undefined,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
codeQL: CodeQL,
|
||||
checkoutPath: string,
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
logger: Logger): Promise<Config> {
|
||||
|
||||
const languages = await getLanguages(
|
||||
languagesInput,
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
logger);
|
||||
const queries = {};
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
await addQueriesFromWorkflowIfRequired(codeQL, languages, queries, tempDir);
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl);
|
||||
}
|
||||
|
||||
return {
|
||||
languages: languages,
|
||||
|
|
@ -570,17 +598,29 @@ export async function getDefaultConfig(tempDir: string, toolCacheDir: string, co
|
|||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(configFile: string, tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
|
||||
async function loadConfig(
|
||||
languagesInput: string | undefined,
|
||||
queriesInput: string | undefined,
|
||||
configFile: string,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
codeQL: CodeQL,
|
||||
checkoutPath: string,
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
logger: Logger): Promise<Config> {
|
||||
|
||||
let parsedYAML: UserConfig;
|
||||
|
||||
if (isLocal(configFile)) {
|
||||
// Treat the config file as relative to the workspace
|
||||
const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
|
||||
configFile = path.resolve(workspacePath, configFile);
|
||||
|
||||
parsedYAML = getLocalConfig(configFile, workspacePath);
|
||||
configFile = path.resolve(checkoutPath, configFile);
|
||||
parsedYAML = getLocalConfig(configFile, checkoutPath);
|
||||
} else {
|
||||
parsedYAML = await getRemoteConfig(configFile);
|
||||
parsedYAML = await getRemoteConfig(
|
||||
configFile,
|
||||
githubAuth,
|
||||
githubUrl);
|
||||
}
|
||||
|
||||
// Validate that the 'name' property is syntactically correct,
|
||||
|
|
@ -594,7 +634,11 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
}
|
||||
}
|
||||
|
||||
const languages = await getLanguages();
|
||||
const languages = await getLanguages(
|
||||
languagesInput,
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
logger);
|
||||
|
||||
const queries = {};
|
||||
const pathsIgnore: string[] = [];
|
||||
|
|
@ -613,8 +657,9 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
|
||||
// If queries were provided using `with` in the action configuration,
|
||||
// they should take precedence over the queries in the config file
|
||||
const addedQueriesFromAction = await addQueriesFromWorkflowIfRequired(codeQL, languages, queries, tempDir);
|
||||
if (!addedQueriesFromAction && QUERIES_PROPERTY in parsedYAML) {
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl);
|
||||
} else if (QUERIES_PROPERTY in parsedYAML) {
|
||||
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
|
||||
throw new Error(getQueriesInvalid(configFile));
|
||||
}
|
||||
|
|
@ -622,7 +667,15 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, configFile);
|
||||
await parseQueryUses(
|
||||
languages,
|
||||
codeQL,
|
||||
queries,
|
||||
query[QUERIES_USES_PROPERTY],
|
||||
tempDir,
|
||||
checkoutPath,
|
||||
githubUrl,
|
||||
configFile);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -634,7 +687,7 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
|
||||
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -646,7 +699,7 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
}
|
||||
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
|
||||
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -677,20 +730,49 @@ async function loadConfig(configFile: string, tempDir: string, toolCacheDir: str
|
|||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
export async function initConfig(tempDir: string, toolCacheDir: string, codeQL: CodeQL): Promise<Config> {
|
||||
const configFile = core.getInput('config-file');
|
||||
export async function initConfig(
|
||||
languagesInput: string | undefined,
|
||||
queriesInput: string | undefined,
|
||||
configFile: string | undefined,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
codeQL: CodeQL,
|
||||
checkoutPath: string,
|
||||
githubAuth: string,
|
||||
githubUrl: string,
|
||||
logger: Logger): Promise<Config> {
|
||||
|
||||
let config: Config;
|
||||
|
||||
// If no config file was provided create an empty one
|
||||
if (configFile === '') {
|
||||
core.debug('No configuration file was provided');
|
||||
config = await getDefaultConfig(tempDir, toolCacheDir, codeQL);
|
||||
if (!configFile) {
|
||||
logger.debug('No configuration file was provided');
|
||||
config = await getDefaultConfig(
|
||||
languagesInput,
|
||||
queriesInput,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQL,
|
||||
checkoutPath,
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
logger);
|
||||
} else {
|
||||
config = await loadConfig(configFile, tempDir, toolCacheDir, codeQL);
|
||||
config = await loadConfig(
|
||||
languagesInput,
|
||||
queriesInput,
|
||||
configFile,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
codeQL,
|
||||
checkoutPath,
|
||||
githubAuth,
|
||||
githubUrl,
|
||||
logger);
|
||||
}
|
||||
|
||||
// Save the config so we can easily access it again in the future
|
||||
await saveConfig(config);
|
||||
await saveConfig(config, logger);
|
||||
return config;
|
||||
}
|
||||
|
||||
|
|
@ -703,9 +785,9 @@ function isLocal(configPath: string): boolean {
|
|||
return (configPath.indexOf("@") === -1);
|
||||
}
|
||||
|
||||
function getLocalConfig(configFile: string, workspacePath: string): UserConfig {
|
||||
function getLocalConfig(configFile: string, checkoutPath: string): UserConfig {
|
||||
// Error if the config file is now outside of the workspace
|
||||
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
if (!(configFile + path.sep).startsWith(checkoutPath + path.sep)) {
|
||||
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
|
||||
}
|
||||
|
||||
|
|
@ -717,7 +799,11 @@ function getLocalConfig(configFile: string, workspacePath: string): UserConfig {
|
|||
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
|
||||
}
|
||||
|
||||
async function getRemoteConfig(configFile: string): Promise<UserConfig> {
|
||||
async function getRemoteConfig(
|
||||
configFile: string,
|
||||
githubAuth: string,
|
||||
githubUrl: string): Promise<UserConfig> {
|
||||
|
||||
// retrieve the various parts of the config location, and ensure they're present
|
||||
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
|
||||
const pieces = format.exec(configFile);
|
||||
|
|
@ -726,7 +812,7 @@ async function getRemoteConfig(configFile: string): Promise<UserConfig> {
|
|||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||
}
|
||||
|
||||
const response = await api.getActionsApiClient(true).repos.getContents({
|
||||
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.getContents({
|
||||
owner: pieces.groups.owner,
|
||||
repo: pieces.groups.repo,
|
||||
path: pieces.groups.path,
|
||||
|
|
@ -755,13 +841,13 @@ export function getPathToParsedConfigFile(tempDir: string): string {
|
|||
/**
|
||||
* Store the given config to the path returned from getPathToParsedConfigFile.
|
||||
*/
|
||||
async function saveConfig(config: Config) {
|
||||
async function saveConfig(config: Config, logger: Logger) {
|
||||
const configString = JSON.stringify(config);
|
||||
const configFile = getPathToParsedConfigFile(config.tempDir);
|
||||
fs.mkdirSync(path.dirname(configFile), { recursive: true });
|
||||
fs.writeFileSync(configFile, configString, 'utf8');
|
||||
core.debug('Saved config:');
|
||||
core.debug(configString);
|
||||
logger.debug('Saved config:');
|
||||
logger.debug(configString);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -772,13 +858,13 @@ async function saveConfig(config: Config) {
|
|||
* stored to a known location. On the second and further calls, this will
|
||||
* return the contents of the parsed config from the known location.
|
||||
*/
|
||||
export async function getConfig(tempDir: string): Promise<Config> {
|
||||
export async function getConfig(tempDir: string, logger: Logger): Promise<Config> {
|
||||
const configFile = getPathToParsedConfigFile(tempDir);
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
||||
}
|
||||
const configString = fs.readFileSync(configFile, 'utf8');
|
||||
core.debug('Loaded config:');
|
||||
core.debug(configString);
|
||||
logger.debug('Loaded config:');
|
||||
logger.debug(configString);
|
||||
return JSON.parse(configString);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -14,6 +14,7 @@ test("checkoutExternalQueries", async t => {
await externalQueries.checkoutExternalRepository(
"github/codeql-go",
ref,
'https://github.com',
tmpDir);

// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch

@@ -6,7 +6,12 @@ import * as path from 'path';
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
export async function checkoutExternalRepository(repository: string, ref: string, tempDir: string): Promise<string> {
export async function checkoutExternalRepository(
repository: string,
ref: string,
githubUrl: string,
tempDir: string): Promise<string> {

core.info('Checking out ' + repository);

const checkoutLocation = path.join(tempDir, repository, ref);
@@ -17,7 +22,7 @@ export async function checkoutExternalRepository(repository: string, ref: string
}

if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
const repoURL = githubUrl + '/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
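A usage sketch of the updated helper, with assumed argument values: the checkout location is `path.join(tempDir, repository, ref)`, so a call such as the following would clone into a predictable per-ref directory.

// Hedged sketch; the repository, ref, URL and tempDir values are assumptions for illustration.
const dir = await externalQueries.checkoutExternalRepository(
  'octo-org/codeql-queries', 'main', 'https://github.example.com', '/tmp/codeql-action');
// dir === '/tmp/codeql-action/octo-org/codeql-queries/main'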
|
@ -116,7 +116,7 @@ test('hash', (t: ava.Assertions) => {
|
|||
function testResolveUriToFile(uri: any, index: any, artifactsURIs: any[]) {
|
||||
const location = { "uri": uri, "index": index };
|
||||
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
|
||||
return fingerprints.resolveUriToFile(location, artifacts, getRunnerLogger());
|
||||
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), getRunnerLogger());
|
||||
}
|
||||
|
||||
test('resolveUriToFile', t => {
|
||||
|
|
@ -129,8 +129,6 @@ test('resolveUriToFile', t => {
|
|||
t.true(filepath.startsWith(cwd + '/'));
|
||||
const relativeFilepaht = filepath.substring(cwd.length + 1);
|
||||
|
||||
process.env['GITHUB_WORKSPACE'] = cwd;
|
||||
|
||||
// Absolute paths are unmodified
|
||||
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
|
||||
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
|
||||
|
|
@ -173,9 +171,9 @@ test('addFingerprints', t => {
|
|||
expected = JSON.stringify(JSON.parse(expected));
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
|
||||
|
||||
t.deepEqual(fingerprints.addFingerprints(input, getRunnerLogger()), expected);
|
||||
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger()), expected);
|
||||
});
|
||||
|
||||
test('missingRegions', t => {
|
||||
|
|
@ -188,7 +186,7 @@ test('missingRegions', t => {
|
|||
expected = JSON.stringify(JSON.parse(expected));
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
|
||||
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
|
||||
|
||||
t.deepEqual(fingerprints.addFingerprints(input, getRunnerLogger()), expected);
|
||||
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger()), expected);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -161,7 +161,12 @@ function locationUpdateCallback(result: any, location: any, logger: Logger): has
|
|||
// the source file so we can hash it.
|
||||
// If possible returns a absolute file path for the source file,
|
||||
// or if not possible then returns undefined.
|
||||
export function resolveUriToFile(location: any, artifacts: any[], logger: Logger): string | undefined {
|
||||
export function resolveUriToFile(
|
||||
location: any,
|
||||
artifacts: any[],
|
||||
checkoutPath: string,
|
||||
logger: Logger): string | undefined {
|
||||
|
||||
// This may be referencing an artifact
|
||||
if (!location.uri && location.index !== undefined) {
|
||||
if (typeof location.index !== 'number' ||
|
||||
|
|
@ -192,7 +197,7 @@ export function resolveUriToFile(location: any, artifacts: any[], logger: Logger
|
|||
}
|
||||
|
||||
// Discard any absolute paths that aren't in the src root
|
||||
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
|
||||
const srcRootPrefix = checkoutPath + '/';
|
||||
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
|
||||
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
|
||||
return undefined;
|
||||
|
|
@ -216,7 +221,7 @@ export function resolveUriToFile(location: any, artifacts: any[], logger: Logger
|
|||
|
||||
// Compute fingerprints for results in the given sarif file
|
||||
// and return an updated sarif file contents.
|
||||
export function addFingerprints(sarifContents: string, logger: Logger): string {
|
||||
export function addFingerprints(sarifContents: string, checkoutPath: string, logger: Logger): string {
|
||||
let sarif = JSON.parse(sarifContents);
|
||||
|
||||
// Gather together results for the same file and construct
|
||||
|
|
@ -234,7 +239,11 @@ export function addFingerprints(sarifContents: string, logger: Logger): string {
|
|||
continue;
|
||||
}
|
||||
|
||||
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, logger);
|
||||
const filepath = resolveUriToFile(
|
||||
primaryLocation.physicalLocation.artifactLocation,
|
||||
artifacts,
|
||||
checkoutPath,
|
||||
logger);
|
||||
if (!filepath) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,9 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as analysisPaths from './analysis-paths';
|
||||
import { CodeQL, setupCodeQL } from './codeql';
|
||||
import { CodeQL } from './codeql';
|
||||
import * as configUtils from './config-utils';
|
||||
import { getCombinedTracerConfig } from './tracer-config';
|
||||
import { initCodeQL, initConfig, runInit } from './init';
|
||||
import { getActionsLogger } from './logging';
|
||||
import * as util from './util';
|
||||
|
||||
interface InitSuccessStatusReport extends util.StatusReportBase {
|
||||
|
|
@ -49,8 +46,8 @@ async function sendSuccessStatusReport(startedAt: Date, config: configUtils.Conf
|
|||
}
|
||||
|
||||
async function run() {
|
||||
|
||||
const startedAt = new Date();
|
||||
const logger = getActionsLogger();
|
||||
let config: configUtils.Config;
|
||||
let codeql: CodeQL;
|
||||
|
||||
|
|
@ -60,18 +57,25 @@ async function run() {
|
|||
return;
|
||||
}
|
||||
|
||||
core.startGroup('Setup CodeQL tools');
|
||||
codeql = await setupCodeQL();
|
||||
await codeql.printVersion();
|
||||
core.endGroup();
|
||||
|
||||
core.startGroup('Load language configuration');
|
||||
config = await configUtils.initConfig(
|
||||
codeql = await initCodeQL(
|
||||
core.getInput('tools'),
|
||||
core.getInput('token'),
|
||||
util.getRequiredEnvParam('GITHUB_API_URL'),
|
||||
util.getRequiredEnvParam('RUNNER_TEMP'),
|
||||
util.getRequiredEnvParam('RUNNER_TOOL_CACHE'),
|
||||
codeql);
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
core.endGroup();
|
||||
'actions',
|
||||
logger);
|
||||
config = await initConfig(
|
||||
core.getInput('languages'),
|
||||
core.getInput('queries'),
|
||||
core.getInput('config-file'),
|
||||
util.getRequiredEnvParam('RUNNER_TEMP'),
|
||||
util.getRequiredEnvParam('RUNNER_TOOL_CACHE'),
|
||||
codeql,
|
||||
util.getRequiredEnvParam('GITHUB_WORKSPACE'),
|
||||
core.getInput('token'),
|
||||
util.getRequiredEnvParam('GITHUB_API_URL'),
|
||||
logger);
|
||||
|
||||
} catch (e) {
|
||||
core.setFailed(e.message);
|
||||
|
|
@ -82,8 +86,6 @@ async function run() {
|
|||
|
||||
try {
|
||||
|
||||
const sourceRoot = path.resolve();
|
||||
|
||||
// Forward Go flags
|
||||
const goFlags = process.env['GOFLAGS'];
|
||||
if (goFlags) {
|
||||
|
|
@ -95,27 +97,8 @@ async function run() {
|
|||
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
|
||||
core.exportVariable('CODEQL_RAM', codeqlRam);
|
||||
|
||||
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
|
||||
|
||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
||||
for (let language of config.languages) {
|
||||
// Init language database
|
||||
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
|
||||
}
|
||||
|
||||
const tracerConfig = await getCombinedTracerConfig(config, codeql);
|
||||
const tracerConfig = await runInit(codeql, config);
|
||||
if (tracerConfig !== undefined) {
|
||||
if (process.platform === 'win32') {
|
||||
await exec.exec(
|
||||
'powershell',
|
||||
[
|
||||
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
|
||||
],
|
||||
{ env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } });
|
||||
}
|
||||
|
||||
// NB: in CLI mode these will be output to a file rather than exported with core.exportVariable
|
||||
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
|
||||
}
|
||||
|
||||
|
|
|
|||
91
src/init.ts
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import * as exec from '@actions/exec';
import * as fs from 'fs';
import * as path from 'path';

import * as analysisPaths from './analysis-paths';
import { CodeQL, setupCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { Logger } from './logging';
import { getCombinedTracerConfig, TracerConfig } from './tracer-config';
import * as util from './util';

export async function initCodeQL(
  codeqlURL: string | undefined,
  githubAuth: string,
  githubUrl: string,
  tempDir: string,
  toolsDir: string,
  mode: util.Mode,
  logger: Logger): Promise<CodeQL> {

  logger.startGroup('Setup CodeQL tools');
  const codeql = await setupCodeQL(
    codeqlURL,
    githubAuth,
    githubUrl,
    tempDir,
    toolsDir,
    mode,
    logger);
  await codeql.printVersion();
  logger.endGroup();
  return codeql;
}

export async function initConfig(
  languagesInput: string | undefined,
  queriesInput: string | undefined,
  configFile: string | undefined,
  tempDir: string,
  toolCacheDir: string,
  codeQL: CodeQL,
  checkoutPath: string,
  githubAuth: string,
  githubUrl: string,
  logger: Logger): Promise<configUtils.Config> {

  logger.startGroup('Load language configuration');
  const config = await configUtils.initConfig(
    languagesInput,
    queriesInput,
    configFile,
    tempDir,
    toolCacheDir,
    codeQL,
    checkoutPath,
    githubAuth,
    githubUrl,
    logger);
  analysisPaths.printPathFiltersWarning(config, logger);
  logger.endGroup();
  return config;
}

export async function runInit(
  codeql: CodeQL,
  config: configUtils.Config): Promise<TracerConfig | undefined> {

  const sourceRoot = path.resolve();

  fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });

  // TODO: replace this code once CodeQL supports multi-language tracing
  for (let language of config.languages) {
    // Init language database
    await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
  }

  const tracerConfig = await getCombinedTracerConfig(config, codeql);
  if (tracerConfig !== undefined) {
    if (process.platform === 'win32') {
      await exec.exec(
        'powershell',
        [
          path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
          path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
        ],
        { env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } });
    }
  }
  return tracerConfig;
}
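The sketch below is not part of this commit; it only illustrates how the three exported helpers are expected to compose. The literal paths, the placeholder token, and the way the tracer environment is applied are assumptions based on the callers shown elsewhere in this diff.

// Illustrative composition of the new init helpers; all values are placeholders.
import { Logger } from './logging';

async function exampleInit(logger: Logger) {
  const codeql = await initCodeQL(
    undefined,               // codeqlURL: let setupCodeQL download a copy
    'username:token',        // githubAuth (placeholder)
    'https://github.com',    // githubUrl
    '/tmp/codeql-temp',      // tempDir (placeholder)
    '/tmp/codeql-tools',     // toolsDir (placeholder)
    'runner',                // mode
    logger);

  const config = await initConfig(
    'javascript',            // languagesInput
    undefined,               // queriesInput
    undefined,               // configFile
    '/tmp/codeql-temp',
    '/tmp/codeql-tools',
    codeql,
    process.cwd(),           // checkoutPath
    'username:token',
    'https://github.com',
    logger);

  const tracerConfig = await runInit(codeql, config);
  if (tracerConfig !== undefined) {
    // In actions mode the caller exports these variables; in runner (CLI) mode
    // they are written to a file instead (see the NB comment in the action diff above).
    for (const [key, value] of Object.entries(tracerConfig.env)) {
      process.env[key] = value;
    }
  }
}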
219
src/runner.ts
@@ -1,6 +1,13 @@
import { Command } from 'commander';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

import { runAnalyze } from './analyze';
import { runAutobuild } from './autobuild';
import { CodeQL, getCodeQL } from './codeql';
import { initCodeQL, initConfig, runInit } from './init';
import { parseLanguage } from './languages';
import { getRunnerLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as upload_lib from './upload-lib';

@@ -8,6 +15,192 @@ import * as upload_lib from './upload-lib';
const program = new Command();
program.version('0.0.1');

function parseGithubUrl(inputUrl: string): string {
  try {
    const url = new URL(inputUrl);

    // If we detect this is trying to be to github.com
    // then return with a fixed canonical URL.
    if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
      return 'https://github.com';
    }

    // Remove the API prefix if it's present
    if (url.pathname.indexOf('/api/v3') !== -1) {
      url.pathname = url.pathname.substring(0, url.pathname.indexOf('/api/v3'));
    }

    return url.toString();

  } catch (e) {
    throw new Error(`"${inputUrl}" is not a valid URL`);
  }
}
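For clarity, a few illustrative calls showing the normalization parseGithubUrl performs. These examples are not part of the commit, and the enterprise hostname is made up.

// Hypothetical inputs and results, based on the function body above.
parseGithubUrl('https://api.github.com');          // => 'https://github.com'
parseGithubUrl('https://github.com/');             // => 'https://github.com'
parseGithubUrl('https://ghe.example.com/api/v3');  // => 'https://ghe.example.com/' (API prefix stripped)
parseGithubUrl('not a url');                       // throws: '"not a url" is not a valid URL'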

function getTempDir(userInput: string | undefined): string {
  const tempDir = path.join(userInput || os.tmpdir(), 'codeql-runner-temp');
  if (!fs.existsSync(tempDir)) {
    fs.mkdirSync(tempDir, { recursive: true });
  }
  return tempDir;
}

function getToolsDir(userInput: string | undefined, tmpDir: string): string {
  const toolsDir = path.join(userInput || path.dirname(tmpDir), 'codeql-runner-tools');
  if (!fs.existsSync(toolsDir)) {
    fs.mkdirSync(toolsDir, { recursive: true });
  }
  return toolsDir;
}

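A small sketch of the defaults these helpers imply when the user passes no directories; the concrete /tmp paths are illustrative and platform-dependent, not taken from this commit.

// With no --temp-dir/--tools-dir arguments:
const tempDir = getTempDir(undefined);             // e.g. /tmp/codeql-runner-temp
const toolsDir = getToolsDir(undefined, tempDir);  // e.g. /tmp/codeql-runner-tools, a sibling of tempDir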
const logger = getRunnerLogger();

interface InitArgs {
  languages: string | undefined;
  queries: string | undefined;
  configFile: string | undefined;
  codeqlPath: string | undefined;
  tempDir: string | undefined;
  toolsDir: string | undefined;
  checkoutPath: string | undefined;
  githubUrl: string;
  githubAuth: string;
}

program
  .command('init')
  .description('Initializes CodeQL')
  .requiredOption('--github-url <url>', 'URL of GitHub instance')
  .requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
  .option('--languages <languages>', 'Comma-separated list of languages to analyze. Defaults to trying to detect languages from the repo.')
  .option('--queries <queries>', 'Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file.')
  .option('--config-file <file>', 'Path to config file')
  .option('--codeql-path <path>', 'Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.')
  .option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
  .option('--tools-dir <dir>', 'Directory to use for CodeQL tools and other files to store between runs. Defaults to same as temp dir.')
  .option('--checkout-path <path>', 'Checkout path (default: current working directory)')
  .action(async (cmd: InitArgs) => {
    try {
      const tempDir = getTempDir(cmd.tempDir);
      const toolsDir = getToolsDir(cmd.toolsDir, tempDir);

      // Wipe the temp dir
      fs.rmdirSync(tempDir, { recursive: true });
      fs.mkdirSync(tempDir, { recursive: true });

      let codeql: CodeQL;
      if (cmd.codeqlPath !== undefined) {
        codeql = getCodeQL(cmd.codeqlPath);
      } else {
        codeql = await initCodeQL(
          undefined,
          cmd.githubAuth,
          parseGithubUrl(cmd.githubUrl),
          tempDir,
          toolsDir,
          'runner',
          logger);
      }

      const config = await initConfig(
        cmd.languages,
        cmd.queries,
        cmd.configFile,
        tempDir,
        toolsDir,
        codeql,
        cmd.checkoutPath || process.cwd(),
        cmd.githubAuth,
        parseGithubUrl(cmd.githubUrl),
        logger);

      await runInit(codeql, config);

    } catch (e) {
      logger.error('Init failed');
      logger.error(e);
      process.exitCode = 1;
    }
  });

interface AutobuildArgs {
  language: string;
  tempDir: string | undefined;
}

program
  .command('autobuild')
  .description('Attempts to automatically build code')
  .requiredOption('--language <language>', 'The language to build')
  .option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
  .action(async (cmd: AutobuildArgs) => {
    try {
      const language = parseLanguage(cmd.language);
      if (language === undefined) {
        throw new Error(`"${cmd.language}" is not a recognised language`);
      }
      await runAutobuild(
        language,
        getTempDir(cmd.tempDir),
        logger);
    } catch (e) {
      logger.error('Autobuild failed');
      logger.error(e);
      process.exitCode = 1;
    }
  });

interface AnalyzeArgs {
  repository: string;
  commit: string;
  ref: string;
  githubUrl: string;
  githubAuth: string;
  checkoutPath: string | undefined;
  upload: boolean;
  outputDir: string | undefined;
  tempDir: string | undefined;
}

program
  .command('analyze')
  .description('Finishes extracting code and runs CodeQL queries')
  .requiredOption('--repository <repository>', 'Repository name')
  .requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
  .requiredOption('--ref <ref>', 'Name of ref that was analyzed')
  .requiredOption('--github-url <url>', 'URL of GitHub instance')
  .requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
  .option('--checkout-path <path>', 'Checkout path (default: current working directory)')
  .option('--no-upload', 'Do not upload results after analysis', false)
  .option('--output-dir <dir>', 'Directory to output SARIF files to. By default will use temp directory.')
  .option('--temp-dir <dir>', 'Directory to use for temporary files. Defaults to OS temp dir.')
  .action(async (cmd: AnalyzeArgs) => {
    try {
      const tempDir = getTempDir(cmd.tempDir);
      const outputDir = cmd.outputDir || path.join(tempDir, 'codeql-sarif');
      await runAnalyze(
        parseRepositoryNwo(cmd.repository),
        cmd.commit,
        cmd.ref,
        undefined,
        undefined,
        undefined,
        cmd.checkoutPath || process.cwd(),
        undefined,
        cmd.githubAuth,
        parseGithubUrl(cmd.githubUrl),
        cmd.upload,
        'runner',
        outputDir,
        tempDir,
        logger);
    } catch (e) {
      logger.error('Upload failed');
      logger.error(e);
      process.exitCode = 1;
    }
  });

interface UploadArgs {
  sarifFile: string;
  repository: string;
@@ -18,30 +211,6 @@ interface UploadArgs {
  checkoutPath: string | undefined;
}

function parseGithubApiUrl(inputUrl: string): string {
  try {
    const url = new URL(inputUrl);

    // If we detect this is trying to be to github.com
    // then return with a fixed canonical URL.
    if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
      return 'https://api.github.com';
    }

    // Add the API path if it's not already present.
    if (url.pathname.indexOf('/api/v3') === -1) {
      url.pathname = path.join(url.pathname, 'api', 'v3');
    }

    return url.toString();

  } catch (e) {
    throw new Error(`"${inputUrl}" is not a valid URL`);
  }
}

const logger = getRunnerLogger();

program
  .command('upload')
  .description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
@@ -65,7 +234,7 @@ program
      cmd.checkoutPath || process.cwd(),
      undefined,
      cmd.githubAuth,
      parseGithubApiUrl(cmd.githubUrl),
      parseGithubUrl(cmd.githubUrl),
      'runner',
      logger);
  } catch (e) {
@@ -12,8 +12,6 @@ import { RepositoryNwo } from './repository';
import * as sharedEnv from './shared-environment';
import * as util from './util';

type UploadMode = 'actions' | 'runner';

// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): string {
@@ -43,8 +41,8 @@ async function uploadPayload(
  payload: any,
  repositoryNwo: RepositoryNwo,
  githubAuth: string,
  githubApiUrl: string,
  mode: UploadMode,
  githubUrl: string,
  mode: util.Mode,
  logger: Logger) {

  logger.info('Uploading results');
@@ -61,7 +59,7 @@ async function uploadPayload(
  // minutes, but just waiting a little bit could maybe help.
  const backoffPeriods = [1, 5, 15];

  const client = api.getApiClient(githubAuth, githubApiUrl);
  const client = api.getApiClient(githubAuth, githubUrl);

  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    const reqURL = mode === 'actions'
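The hunk above only shows the top of the retry loop. The sketch below is an assumed, simplified shape of retry-with-backoff using the same backoffPeriods array; the wait units, the helper name, and the error handling are assumptions and not taken from this file.

// Illustrative retry helper; not the actual uploadPayload implementation.
async function withBackoff<T>(backoffPeriods: number[], op: () => Promise<T>): Promise<T> {
  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    try {
      return await op();
    } catch (e) {
      if (attempt === backoffPeriods.length) {
        throw e;  // out of retries
      }
      // Wait before the next attempt (units assumed to be seconds here).
      await new Promise(resolve => setTimeout(resolve, backoffPeriods[attempt] * 1000));
    }
  }
  throw new Error('unreachable');
}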
@@ -134,8 +132,8 @@ export async function upload(
  checkoutPath: string,
  environment: string | undefined,
  githubAuth: string,
  githubApiUrl: string,
  mode: UploadMode,
  githubUrl: string,
  mode: util.Mode,
  logger: Logger): Promise<UploadStatusReport> {

  const sarifFiles: string[] = [];
@@ -165,7 +163,7 @@ export async function upload(
    checkoutPath,
    environment,
    githubAuth,
    githubApiUrl,
    githubUrl,
    mode,
    logger);
}
@@ -214,8 +212,8 @@ async function uploadFiles(
  checkoutPath: string,
  environment: string | undefined,
  githubAuth: string,
  githubApiUrl: string,
  mode: UploadMode,
  githubUrl: string,
  mode: util.Mode,
  logger: Logger): Promise<UploadStatusReport> {

  logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
@@ -235,7 +233,7 @@ async function uploadFiles(
  }

  let sarifPayload = combineSarifFiles(sarifFiles);
  sarifPayload = fingerprints.addFingerprints(sarifPayload, logger);
  sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);

  const zipped_sarif = zlib.gzipSync(sarifPayload).toString('base64');
  let checkoutURI = fileUrl(checkoutPath);
@@ -275,7 +273,7 @@ async function uploadFiles(
  logger.debug("Number of results in upload: " + numResultInSarif);

  // Make the upload
  await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
  await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);

  return {
    raw_upload_size_bytes: rawUploadSizeBytes,
27
src/util.ts
@@ -8,26 +8,21 @@ import * as api from './api-client';
import { Language } from './languages';
import * as sharedEnv from './shared-environment';

/**
 * Are we running on actions, or not.
 */
export type Mode = 'actions' | 'runner';

/**
 * The URL for github.com.
 */
export const GITHUB_DOTCOM_URL = "https://github.com";

/**
 * The API URL for github.com.
 */
export const GITHUB_DOTCOM_API_URL = "https://api.github.com";

/**
 * Get the API URL for the GitHub instance we are connected to.
 * May be for github.com or for an enterprise instance.
 */
export function getInstanceAPIURL(): string {
  return process.env["GITHUB_API_URL"] || GITHUB_DOTCOM_API_URL;
}

/**
 * Are we running against a GitHub Enterpise instance, as opposed to github.com.
 */
export function isEnterprise(): boolean {
  return getInstanceAPIURL() !== GITHUB_DOTCOM_API_URL;
}

/**
 * Get an environment parameter, but throw an error if it is not set.
 */
@@ -297,7 +292,7 @@ export async function sendStatusReport<S extends StatusReportBase>(
  statusReport: S,
  ignoreFailures?: boolean): Promise<boolean> {

  if (isEnterprise()) {
  if (getRequiredEnvParam("GITHUB_API_URL") !== GITHUB_DOTCOM_API_URL) {
    core.debug("Not sending status report to GitHub Enterprise");
    return true;
  }
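Illustrative effect of the new guard above, not part of this commit: status reports are only sent when GITHUB_API_URL matches the github.com API URL, and on GitHub Enterprise the function logs a debug message and returns true without sending. The helper name and enterprise URL below are made up for this example.

// Illustrative only: how the guard decides whether a status report would be sent.
function willSendStatusReport(): boolean {
  // 'https://api.github.com'         -> report is sent to github.com
  // 'https://ghe.example.com/api/v3' -> skipped; sendStatusReport returns true without sending
  return process.env['GITHUB_API_URL'] === 'https://api.github.com';
}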