Upload CodeQL databases

Robert 2021-06-22 13:05:12 +01:00
parent b2d10b39b0
commit 146c897909
24 changed files with 981 additions and 24 deletions


@@ -34,6 +34,10 @@ inputs:
category:
description: String used by Code Scanning for matching the analyses
required: false
upload-database:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
token:
default: ${{ github.token }}
matrix:
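
The new upload-database input is consumed by the database-upload step added later in this commit; any value other than the string "true" disables the upload. A minimal sketch of that gate (illustration only, using the generic @actions/core input helper rather than the action's own getRequiredInput wrapper):

import * as core from "@actions/core";

// Sketch: mirrors the check in src/database-upload.ts. Anything other than the
// literal string "true" (for example "false" or "True") skips the database upload.
const shouldUploadDatabase = core.getInput("upload-database") === "true";
core.debug(`upload-database resolved to ${shouldUploadDatabase}`);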

lib/actions-util.js generated (30 changed lines)

@@ -448,10 +448,6 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
function isHTTPError(arg) {
var _a;
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
}
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
@@ -481,7 +477,7 @@ async function sendStatusReport(statusReport) {
}
catch (e) {
console.log(e);
if (isHTTPError(e)) {
if (util_1.isHTTPError(e)) {
switch (e.status) {
case 403:
if (workflowIsTriggeredByPushEvent() && isDependabotActor()) {
@@ -540,4 +536,28 @@ function getRelativeScriptPath() {
return path.relative(actionsDirectory, __filename);
}
exports.getRelativeScriptPath = getRelativeScriptPath;
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
function getWorkflowEvent() {
const eventJsonFile = util_1.getRequiredEnvParam("GITHUB_EVENT_PATH");
try {
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
}
catch (e) {
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
}
}
// Returns whether the version of the repository we are currently analyzing is from
// the default branch, as opposed to another branch or a pull request.
async function isAnalyzingDefaultBranch() {
var _a, _b;
// Get the current ref and trim any refs/heads/ prefix
let currentRef = await getRef();
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.substr("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
const defaultBranch = (_b = (_a = event) === null || _a === void 0 ? void 0 : _a.repository) === null || _b === void 0 ? void 0 : _b.default_branch;
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long


@@ -1,7 +1,4 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -9,7 +6,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const sinon_1 = __importDefault(require("sinon"));
@@ -408,4 +410,22 @@ ava_1.default("initializeEnvironment", (t) => {
t.deepEqual(util_1.getMode(), util_1.Mode.runner);
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
});
ava_1.default("isAnalyzingDefaultBranch()", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
const envFile = path.join(tmpDir, "event.json");
fs.writeFileSync(envFile, JSON.stringify({
repository: {
default_branch: "main",
},
}));
process.env["GITHUB_EVENT_PATH"] = envFile;
process.env["GITHUB_REF"] = "main";
process.env["GITHUB_SHA"] = "1234";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

lib/analyze-action.js generated (4 changed lines)

@@ -13,7 +13,9 @@ const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
// eslint-disable-next-line import/no-commonjs
@@ -66,6 +68,8 @@ async function run() {
logger.info("Not uploading results");
stats = { ...queriesStats };
}
const repositoryNwo = repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await database_upload_1.uploadDatabases(repositoryNwo, config, apiDetails, logger);
}
catch (error) {
core.setFailed(error.message);


File diff suppressed because one or more lines are too long

lib/analyze-action.test.js generated, new file (76 added lines)

@@ -0,0 +1,76 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon_1 = __importDefault(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const analyze_action_1 = require("./analyze-action");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default);
const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = {
auth: "1234",
url: "https://example.github.com"
};
function getTestConfig(tmpDir) {
return {
languages: [languages_1.Language.javascript],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "foo",
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
dbLocation: "foo",
packs: {}
};
}
function getRecordingLogger(messages) {
const baseLogger = logging_1.getRunnerLogger(true);
return {
debug: (message) => {
messages.push({ type: "debug", message });
baseLogger.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
baseLogger.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
baseLogger.warning(message);
},
error: (message) => {
messages.push({ type: "error", message });
baseLogger.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
ava_1.default("Abort database upload if 'upload-database' input set to false", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
const callback = sinon_1.default.stub(actionsutil, "getRequiredInput");
callback.withArgs("upload-database").resolves("false");
const loggedMessages = [];
await analyze_action_1.uploadDatabases(testRepoName, await getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" && v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
callback.restore();
});
});
//# sourceMappingURL=analyze-action.test.js.map

File diff suppressed because one or more lines are too long

lib/codeql.js generated (10 changed lines)

@@ -287,6 +287,7 @@ function setCodeQL(partialCodeql) {
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
packDownload: resolveFunction(partialCodeql, "packDownload"),
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
databaseBundle: resolveFunction(partialCodeql, "databaseBundle"),
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
};
@@ -535,6 +536,15 @@ function getCodeQLForCmd(cmd) {
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
}
function packWithVersionToString(pack) {

File diff suppressed because one or more lines are too long

lib/database-upload.js generated, new file (71 added lines)

@@ -0,0 +1,71 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const util = __importStar(require("./util"));
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
return;
}
// Do nothing when not running against github.com
if (config.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
logger.debug("Not running against github.com. Skipping upload.");
return;
}
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
// We only want to upload a database if we are analyzing the default branch.
logger.debug("Not analyzing default branch. Skipping upload.");
return;
}
const client = api_client_1.getApiClient(apiDetails);
try {
await client.request("GET /repos/:owner/:repo/code-scanning/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return;
}
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
// Bundle the database up into a single zip file
const databasePath = util.getCodeQLDatabasePath(config, language);
const databaseBundlePath = `${databasePath}.zip`;
await codeql.databaseBundle(databasePath, databaseBundlePath);
// Upload the database bundle
const payload = fs.readFileSync(databaseBundlePath);
try {
await client.request(`PUT /repos/:owner/:repo/code-scanning/databases/${language}`, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`Successfully uploaded database for ${language}`);
}
catch (e) {
console.log(e);
// Log a warning but don't fail the workflow
logger.warning(`Failed to upload database for ${language}: ${e}`);
}
}
}
exports.uploadDatabases = uploadDatabases;
//# sourceMappingURL=database-upload.js.map

File diff suppressed because one or more lines are too long

lib/database-upload.test.js generated, new file (226 added lines)

@@ -0,0 +1,226 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon_1 = __importDefault(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const apiClient = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const database_upload_1 = require("./database-upload");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default);
ava_1.default.beforeEach(() => {
util_1.initializeEnvironment(util_1.Mode.actions, "1.2.3");
});
const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = {
auth: "1234",
url: "https://github.com",
};
function getTestConfig(tmpDir) {
return {
languages: [languages_1.Language.javascript],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "foo",
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
dbLocation: tmpDir,
packs: {},
};
}
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon_1.default.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/databases");
if (optInStatusCode < 300) {
optInSpy.resolves(undefined);
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/databases/javascript");
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
}
else {
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
}
}
sinon_1.default.stub(apiClient, "getApiClient").value(() => client);
}
ava_1.default("Abort database upload if 'upload-database' input set to false", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if running against GHES", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if running against GHAE", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if not analyzing default branch", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if opt-in request returns 404", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
});
});
ava_1.default("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
});
});
ava_1.default("Don't crash if uploading a database fails", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 500);
codeql_1.setCodeQL({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message") !== undefined);
});
});
ava_1.default("Successfully uploading a database", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
testing_utils_1.setupActionsVars(tmpDir, tmpDir);
sinon_1.default
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon_1.default.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 201);
codeql_1.setCodeQL({
async databaseBundle(_, outputFilePath) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [];
await database_upload_1.uploadDatabases(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message === "Successfully uploaded database for javascript") !== undefined);
});
});
//# sourceMappingURL=database-upload.test.js.map

File diff suppressed because one or more lines are too long

lib/util.js generated (12 changed lines)

@@ -408,4 +408,16 @@ function getRequiredEnvParam(paramName) {
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
class HTTPError extends Error {
constructor(message, status) {
super(message);
this.status = status;
}
}
exports.HTTPError = HTTPError;
function isHTTPError(arg) {
var _a;
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
}
exports.isHTTPError = isHTTPError;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long


@@ -1,10 +1,13 @@
import * as fs from "fs";
import * as path from "path";
import test from "ava";
import * as yaml from "js-yaml";
import sinon from "sinon";
import * as actionsutil from "./actions-util";
import { setupTests } from "./testing-utils";
import { getMode, initializeEnvironment, Mode } from "./util";
import { getMode, initializeEnvironment, Mode, withTmpDir } from "./util";
function errorCodes(
actual: actionsutil.CodedError[],
@@ -652,3 +655,28 @@ test("initializeEnvironment", (t) => {
t.deepEqual(getMode(), Mode.runner);
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "4.5.6");
});
test("isAnalyzingDefaultBranch()", async (t) => {
await withTmpDir(async (tmpDir) => {
const envFile = path.join(tmpDir, "event.json");
fs.writeFileSync(
envFile,
JSON.stringify({
repository: {
default_branch: "main",
},
})
);
process.env["GITHUB_EVENT_PATH"] = envFile;
process.env["GITHUB_REF"] = "main";
process.env["GITHUB_SHA"] = "1234";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "refs/heads/main";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
process.env["GITHUB_REF"] = "feature";
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});


@@ -8,7 +8,7 @@ import * as yaml from "js-yaml";
import * as api from "./api-client";
import * as sharedEnv from "./shared-environment";
import { getRequiredEnvParam, GITHUB_DOTCOM_URL } from "./util";
import { getRequiredEnvParam, GITHUB_DOTCOM_URL, isHTTPError } from "./util";
/**
* The utils in this module are meant to be run inside of the action only.
@@ -576,15 +576,6 @@ export async function createStatusReportBase(
return statusReport;
}
interface HTTPError {
status: number;
message?: string;
}
function isHTTPError(arg: any): arg is HTTPError {
return arg?.status !== undefined && Number.isInteger(arg.status);
}
const GENERIC_403_MSG =
"The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG =
@@ -691,3 +682,30 @@ export function getRelativeScriptPath(): string {
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
return path.relative(actionsDirectory, __filename);
}
// Reads the contents of GITHUB_EVENT_PATH as a JSON object
function getWorkflowEvent(): any {
const eventJsonFile = getRequiredEnvParam("GITHUB_EVENT_PATH");
try {
return JSON.parse(fs.readFileSync(eventJsonFile, "utf-8"));
} catch (e) {
throw new Error(
`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`
);
}
}
// Returns whether the version of the repository we are currently analyzing is from
// the default branch, as opposed to another branch or a pull request.
export async function isAnalyzingDefaultBranch(): Promise<boolean> {
// Get the current ref and trim any refs/heads/ prefix
let currentRef = await getRef();
currentRef = currentRef.startsWith("refs/heads/")
? currentRef.substr("refs/heads/".length)
: currentRef;
const event = getWorkflowEvent();
const defaultBranch = event?.repository?.default_branch;
return currentRef === defaultBranch;
}
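
The trimming above means the comparison works for plain branch names as well as fully qualified refs, while pull request refs never match. A standalone illustration of the same logic (not part of the action code):

// Same refs/heads/ trimming as isAnalyzingDefaultBranch, extracted for illustration.
function trimHeadsPrefix(ref: string): string {
  return ref.startsWith("refs/heads/") ? ref.substr("refs/heads/".length) : ref;
}

console.log(trimHeadsPrefix("refs/heads/main")); // "main" -> matches a default_branch of "main"
console.log(trimHeadsPrefix("main")); // "main" -> already-trimmed refs also match
console.log(trimHeadsPrefix("refs/pull/123/merge")); // unchanged -> pull requests never match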


@@ -11,7 +11,9 @@ import {
runCleanup,
} from "./analyze";
import { Config, getConfig } from "./config-utils";
import { uploadDatabases } from "./database-upload";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import * as util from "./util";
@@ -116,6 +118,11 @@ async function run() {
logger.info("Not uploading results");
stats = { ...queriesStats };
}
const repositoryNwo = parseRepositoryNwo(
util.getRequiredEnvParam("GITHUB_REPOSITORY")
);
await uploadDatabases(repositoryNwo, config, apiDetails, logger);
} catch (error) {
core.setFailed(error.message);
console.log(error);
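
For reference, GITHUB_REPOSITORY always has the "owner/repo" form, so the value handed to uploadDatabases is a plain owner/repo pair. A small sketch (the repository name here is illustrative):

import { parseRepositoryNwo } from "./repository";

// "nwo" is short for "name with owner"; for "github/example" this yields
// { owner: "github", repo: "example" }.
const repositoryNwo = parseRepositoryNwo("github/example");
console.log(repositoryNwo.owner, repositoryNwo.repo);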


@@ -99,6 +99,10 @@ export interface CodeQL {
* Run 'codeql database cleanup'.
*/
databaseCleanup(databasePath: string, cleanupLevel: string): Promise<void>;
/**
* Run 'codeql database bundle'.
*/
databaseBundle(databasePath: string, outputFilePath: string): Promise<void>;
/**
* Run 'codeql database run-queries'.
*/
@@ -512,6 +516,7 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
packDownload: resolveFunction(partialCodeql, "packDownload"),
databaseCleanup: resolveFunction(partialCodeql, "databaseCleanup"),
databaseBundle: resolveFunction(partialCodeql, "databaseBundle"),
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
databaseInterpretResults: resolveFunction(
partialCodeql,
@@ -829,6 +834,18 @@ function getCodeQLForCmd(cmd: string): CodeQL {
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(
databasePath: string,
outputFilePath: string
): Promise<void> {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
};
}
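
The new databaseBundle wrapper simply shells out to "codeql database bundle <databasePath> --output=<outputFilePath>". A usage sketch (the CLI location and database paths are illustrative):

import { getCodeQL } from "./codeql";

async function bundleJavascriptDatabase(): Promise<void> {
  // Assumes a CodeQL CLI at this path and an existing database directory.
  const codeql = getCodeQL("/path/to/codeql");
  await codeql.databaseBundle(
    "/tmp/codeql_databases/javascript",
    "/tmp/codeql_databases/javascript.zip"
  );
}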

src/database-upload.test.ts, new file (351 added lines)

@@ -0,0 +1,351 @@
import * as fs from "fs";
import * as github from "@actions/github";
import test from "ava";
import sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { GitHubApiDetails } from "./api-client";
import * as apiClient from "./api-client";
import { setCodeQL } from "./codeql";
import { Config } from "./config-utils";
import { uploadDatabases } from "./database-upload";
import { Language } from "./languages";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import { setupActionsVars, setupTests } from "./testing-utils";
import {
GitHubVariant,
HTTPError,
initializeEnvironment,
Mode,
withTmpDir,
} from "./util";
setupTests(test);
test.beforeEach(() => {
initializeEnvironment(Mode.actions, "1.2.3");
});
const testRepoName: RepositoryNwo = { owner: "github", repo: "example" };
const testApiDetails: GitHubApiDetails = {
auth: "1234",
url: "https://github.com",
};
function getTestConfig(tmpDir: string): Config {
return {
languages: [Language.javascript],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "foo",
gitHubVersion: { type: GitHubVariant.DOTCOM },
dbLocation: tmpDir,
packs: {},
};
}
interface LoggedMessage {
type: "debug" | "info" | "warning" | "error";
message: string;
}
function getRecordingLogger(messages: LoggedMessage[]): Logger {
return {
debug: (message: string) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message: string) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message: string) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message: string) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
function mockHttpRequests(
optInStatusCode: number,
databaseUploadStatusCode?: number
) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs(
"GET /repos/:owner/:repo/code-scanning/databases"
);
if (optInStatusCode < 300) {
optInSpy.resolves(undefined);
} else {
optInSpy.throws(new HTTPError("some error message", optInStatusCode));
}
if (databaseUploadStatusCode !== undefined) {
const databaseUploadSpy = requestSpy.withArgs(
"PUT /repos/:owner/:repo/code-scanning/databases/javascript"
);
if (databaseUploadStatusCode < 300) {
databaseUploadSpy.resolves(undefined);
} else {
databaseUploadSpy.throws(
new HTTPError("some error message", databaseUploadStatusCode)
);
}
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
test("Abort database upload if 'upload-database' input set to false", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Database upload disabled in workflow. Skipping upload."
) !== undefined
);
});
});
test("Abort database upload if running against GHES", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await uploadDatabases(
testRepoName,
config,
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload."
) !== undefined
);
});
});
test("Abort database upload if running against GHAE", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const config = getTestConfig(tmpDir);
config.gitHubVersion = { type: GitHubVariant.GHAE };
const loggedMessages = [];
await uploadDatabases(
testRepoName,
config,
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not running against github.com. Skipping upload."
) !== undefined
);
});
});
test("Abort database upload if not analyzing default branch", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload."
) !== undefined
);
});
});
test("Abort database upload if opt-in request returns 404", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(404);
const loggedMessages = [];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Repository is not opted in to database uploads. Skipping upload."
) !== undefined
);
});
});
test("Abort database upload if opt-in request fails with something other than 404", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(500);
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v) =>
v.type === "info" &&
v.message ===
"Skipping database upload due to unknown error: Error: some error message"
) !== undefined
);
});
});
test("Don't crash if uploading a database fails", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 500);
setCodeQL({
async databaseBundle(_: string, outputFilePath: string) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v) =>
v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: Error: some error message"
) !== undefined
);
});
});
test("Successfully uploading a database", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
mockHttpRequests(204, 201);
setCodeQL({
async databaseBundle(_: string, outputFilePath: string) {
fs.writeFileSync(outputFilePath, "");
},
});
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
testRepoName,
getTestConfig(tmpDir),
testApiDetails,
getRecordingLogger(loggedMessages)
);
t.assert(
loggedMessages.find(
(v) =>
v.type === "debug" &&
v.message === "Successfully uploaded database for javascript"
) !== undefined
);
});
});

src/database-upload.ts, new file (77 added lines)

@@ -0,0 +1,77 @@
import * as fs from "fs";
import * as actionsUtil from "./actions-util";
import { getApiClient, GitHubApiDetails } from "./api-client";
import { getCodeQL } from "./codeql";
import { Config } from "./config-utils";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import * as util from "./util";
export async function uploadDatabases(
repositoryNwo: RepositoryNwo,
config: Config,
apiDetails: GitHubApiDetails,
logger: Logger
): Promise<void> {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
return;
}
// Do nothing when not running against github.com
if (config.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
logger.debug("Not running against github.com. Skipping upload.");
return;
}
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
// We only want to upload a database if we are analyzing the default branch.
logger.debug("Not analyzing default branch. Skipping upload.");
return;
}
const client = getApiClient(apiDetails);
try {
await client.request("GET /repos/:owner/:repo/code-scanning/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
} catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
logger.debug(
"Repository is not opted in to database uploads. Skipping upload."
);
} else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return;
}
const codeql = getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
// Bundle the database up into a single zip file
const databasePath = util.getCodeQLDatabasePath(config, language);
const databaseBundlePath = `${databasePath}.zip`;
await codeql.databaseBundle(databasePath, databaseBundlePath);
// Upload the database bundle
const payload = fs.readFileSync(databaseBundlePath);
try {
await client.request(
`PUT /repos/:owner/:repo/code-scanning/databases/${language}`,
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
}
);
logger.debug(`Successfully uploaded database for ${language}`);
} catch (e) {
console.log(e);
// Log a warning but don't fail the workflow
logger.warning(`Failed to upload database for ${language}: ${e}`);
}
}
}
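
In API terms, uploadDatabases makes two kinds of Code Scanning requests: a GET probe to check that the repository is opted in to database uploads, then one PUT per analyzed language with the zipped bundle as the request body. A condensed sketch of those calls (owner, repo, token, and paths are placeholders):

import * as fs from "fs";
import { getApiClient } from "./api-client";

async function sketchDatabaseUpload(): Promise<void> {
  const client = getApiClient({ auth: "<token>", url: "https://github.com" });
  // 1. Probe opt-in status; a 404 here means the repository is not opted in.
  await client.request("GET /repos/:owner/:repo/code-scanning/databases", {
    owner: "github",
    repo: "example",
  });
  // 2. Upload the bundled database for one language.
  await client.request(
    "PUT /repos/:owner/:repo/code-scanning/databases/javascript",
    {
      owner: "github",
      repo: "example",
      data: fs.readFileSync("/tmp/codeql_databases/javascript.zip"),
    }
  );
}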


@@ -478,3 +478,16 @@ export function getRequiredEnvParam(paramName: string): string {
}
return value;
}
export class HTTPError extends Error {
public status: number;
constructor(message: string, status: number) {
super(message);
this.status = status;
}
}
export function isHTTPError(arg: any): arg is HTTPError {
return arg?.status !== undefined && Number.isInteger(arg.status);
}