Merge remote-tracking branch 'upstream/main' into issue-1589-config-param
commit 824d18c689
263 changed files with 1010 additions and 30486 deletions
lib/actions-util.js (generated, 10 changes)
|
|
@@ -163,7 +163,7 @@ async function getAnalysisKey() {
|
|||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await (0, workflow_1.getWorkflowPath)();
|
||||
const workflowPath = await (0, workflow_1.getWorkflowRelativePath)();
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
analysisKey = `${workflowPath}:${jobName}`;
|
||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||
|
|
@@ -290,11 +290,8 @@ exports.getActionVersion = getActionVersion;
|
|||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
||||
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
||||
const ref = await getRef();
|
||||
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
|
||||
let workflowRunID = -1;
|
||||
if (workflowRunIDStr) {
|
||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
}
|
||||
const workflowRunID = (0, workflow_1.getWorkflowRunID)();
|
||||
const workflowRunAttempt = (0, workflow_1.getWorkflowRunAttempt)();
|
||||
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
|
||||
const jobName = process.env["GITHUB_JOB"] || "";
|
||||
const analysis_key = await getAnalysisKey();
|
||||
|
|
@@ -314,6 +311,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
|||
}
|
||||
const statusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_run_attempt: workflowRunAttempt,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
analysis_key,
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
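The actions-util.js hunks above drop the inline GITHUB_RUN_ID parsing in favor of shared workflow helpers and add a workflow_run_attempt field to the status report. As a rough sketch (hypothetical code, not the repository's exact workflow.js implementation), such helpers can be built on the standard Actions environment variables, keeping the -1 fallback that the removed inline code used:

// Hypothetical sketch of run-ID/attempt helpers built on Actions environment variables.
function parseRunNumber(name) {
    const raw = process.env[name];
    const value = raw ? parseInt(raw, 10) : Number.NaN;
    // Mirror the removed inline code: fall back to -1 rather than failing the status report.
    return Number.isNaN(value) ? -1 : value;
}
const workflowRunID = parseRunNumber("GITHUB_RUN_ID");           // e.g. 4712345678
const workflowRunAttempt = parseRunNumber("GITHUB_RUN_ATTEMPT"); // 1 on the first attempt
console.log({ workflowRunID, workflowRunAttempt });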
lib/analyze-action.js (generated, 1 change)
|
|
@@ -155,7 +155,6 @@ async function run() {
|
|||
if (hasBadExpectErrorInput()) {
|
||||
throw new Error("`expect-error` input parameter is for internal use only. It should only be set by codeql-action or a fork.");
|
||||
}
|
||||
await (0, codeql_1.enrichEnvironment)(await (0, codeql_1.getCodeQL)(config.codeQLCmd));
|
||||
const apiDetails = (0, api_client_1.getApiDetails)();
|
||||
const outputDir = actionsUtil.getRequiredInput("output");
|
||||
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
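analyze-action.js keeps the precedence "explicit `threads` input first, then the CODEQL_THREADS variable" when building the threads flag. A minimal illustration of that precedence (hypothetical helper for illustration only, not the repository's util.getThreadsFlag):

const os = require("os");
// Hypothetical helper: explicit input, then CODEQL_THREADS, then all available cores.
function resolveThreadsFlag(userInput) {
    const raw = userInput || process.env["CODEQL_THREADS"];
    const parsed = raw ? parseInt(raw, 10) : Number.NaN;
    const threads = Number.isNaN(parsed) ? os.cpus().length : parsed;
    return `--threads=${threads}`;
}
console.log(resolveThreadsFlag(undefined)); // e.g. "--threads=8" on an 8-core runner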
lib/analyze.js (generated, 12 changes)
|
|
@@ -37,7 +37,6 @@ const analysisPaths = __importStar(require("./analysis-paths"));
|
|||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const languages_1 = require("./languages");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const util = __importStar(require("./util"));
|
||||
class CodeQLAnalysisError extends Error {
|
||||
|
|
@@ -283,20 +282,13 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
|||
}
|
||||
await fs.promises.mkdir(outputDir, { recursive: true });
|
||||
const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
// WARNING: This does not _really_ end tracing, as the tracer will restore its
|
||||
// critical environment variables and it'll still be active for all processes
|
||||
// launched from this build step.
|
||||
// However, it will stop tracing for all steps past the codeql-action/analyze
|
||||
// step.
|
||||
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Delete variables as specified by the end-tracing script
|
||||
await (0, tracer_config_1.endTracingForCluster)(config);
|
||||
}
|
||||
else {
|
||||
// Delete the tracer config env var to avoid tracing ourselves
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
}
|
||||
// Delete variables as specified by the end-tracing script
|
||||
await (0, tracer_config_1.endTracingForCluster)(config);
|
||||
return timings;
|
||||
}
|
||||
exports.runFinalize = runFinalize;
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
lib/codeql.js (generated, 108 changes)
|
|
@@ -23,10 +23,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.enrichEnvironment = exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
|
|
@@ -35,7 +34,6 @@ const error_matcher_1 = require("./error-matcher");
|
|||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const setupCodeql = __importStar(require("./setup-codeql"));
|
||||
const shared_environment_1 = require("./shared-environment");
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
const util = __importStar(require("./util"));
|
||||
|
|
@@ -62,7 +60,7 @@ let cachedCodeQL = undefined;
|
|||
* The version flags below can be used to conditionally enable certain features
|
||||
* on versions newer than this.
|
||||
*/
|
||||
const CODEQL_MINIMUM_VERSION = "2.6.3";
|
||||
const CODEQL_MINIMUM_VERSION = "2.8.5";
|
||||
/**
|
||||
* Versions of CodeQL that version-flag certain functionality in the Action.
|
||||
* For convenience, please keep these in descending order. Once a version
|
||||
|
|
@@ -73,21 +71,6 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
|||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
* tracing, and multi-language tracing together with database clusters.
|
||||
*
|
||||
* Note that there were bugs in both of these features that were fixed in
|
||||
* release 2.7.0 of the CodeQL CLI, therefore this flag is only enabled for
|
||||
* versions above that.
|
||||
*/
|
||||
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
||||
/**
|
||||
* Versions 2.7.3+ of the CodeQL CLI support build tracing with glibc 2.34 on Linux. Versions before
|
||||
* this cannot perform build tracing when running on the Actions `ubuntu-22.04` runner image.
|
||||
*/
|
||||
exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";
|
||||
/**
|
||||
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
|
||||
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
|
||||
|
|
@@ -139,8 +122,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
|||
};
|
||||
}
|
||||
catch (e) {
|
||||
logger.error((0, util_1.wrapError)(e).message);
|
||||
throw new Error("Unable to download and extract CodeQL CLI");
|
||||
throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.wrapError)(e).message}`);
|
||||
}
|
||||
}
|
||||
exports.setupCodeQL = setupCodeQL;
|
||||
|
|
@@ -177,8 +159,6 @@ function setCodeQL(partialCodeql) {
|
|||
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
|
||||
getVersion: resolveFunction(partialCodeql, "getVersion", () => new Promise((resolve) => resolve("1.0.0"))),
|
||||
printVersion: resolveFunction(partialCodeql, "printVersion"),
|
||||
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
|
||||
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
|
||||
databaseInitCluster: resolveFunction(partialCodeql, "databaseInitCluster"),
|
||||
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
|
||||
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
|
||||
|
|
@@ -245,73 +225,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||
async printVersion() {
|
||||
await runTool(cmd, ["version", "--format=json"]);
|
||||
},
|
||||
async getTracerEnv(databasePath) {
|
||||
// Write tracer-env.js to a temp location.
|
||||
// BEWARE: The name and location of this file is recognized by `codeql database
|
||||
// trace-command` in order to enable special support for concatenable tracer
|
||||
// configurations. Consequently the name must not be changed.
|
||||
// (This warning can be removed once a different way to recognize the
|
||||
// action/runner has been implemented in `codeql database trace-command`
|
||||
// _and_ is present in the latest supported CLI release.)
|
||||
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js");
|
||||
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
|
||||
fs.writeFileSync(tracerEnvJs, `
|
||||
const fs = require('fs');
|
||||
const env = {};
|
||||
for (let entry of Object.entries(process.env)) {
|
||||
const key = entry[0];
|
||||
const value = entry[1];
|
||||
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
|
||||
env[key] = value;
|
||||
}
|
||||
}
|
||||
process.stdout.write(process.argv[2]);
|
||||
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
|
||||
// BEWARE: The name and location of this file is recognized by `codeql database
|
||||
// trace-command` in order to enable special support for concatenable tracer
|
||||
// configurations. Consequently the name must not be changed.
|
||||
// (This warning can be removed once a different way to recognize the
|
||||
// action/runner has been implemented in `codeql database trace-command`
|
||||
// _and_ is present in the latest supported CLI release.)
|
||||
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
||||
try {
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
databasePath,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
process.execPath,
|
||||
tracerEnvJs,
|
||||
envFile,
|
||||
]);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof CommandInvocationError &&
|
||||
e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
|
||||
process.platform === "linux" &&
|
||||
!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_TRACING_GLIBC_2_34))) {
|
||||
throw new util.UserError("The CodeQL CLI is incompatible with the version of glibc on your system. " +
|
||||
`Please upgrade to CodeQL CLI version ${exports.CODEQL_VERSION_TRACING_GLIBC_2_34} or ` +
|
||||
"later. If you cannot upgrade to a newer version of the CodeQL CLI, you can " +
|
||||
`alternatively run your workflow on another runner image such as "ubuntu-20.04" ` +
|
||||
"that has glibc 2.33 or earlier installed.");
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
||||
},
|
||||
async databaseInit(databasePath, language, sourceRoot) {
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"init",
|
||||
databasePath,
|
||||
`--language=${language}`,
|
||||
`--source-root=${sourceRoot}`,
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
},
|
||||
async databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger) {
|
||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
|
||||
|
|
@@ -853,19 +766,4 @@ async function getCodeScanningConfigExportArguments(config, codeql, features) {
|
|||
}
|
||||
return [];
|
||||
}
|
||||
/**
|
||||
* Enrich the environment variables with further flags that we cannot
|
||||
* know the value of until we know what version of CodeQL we're running.
|
||||
*/
|
||||
async function enrichEnvironment(codeql) {
|
||||
if (await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_NEW_TRACING)) {
|
||||
core.exportVariable(shared_environment_1.EnvVar.FEATURE_MULTI_LANGUAGE, "false");
|
||||
core.exportVariable(shared_environment_1.EnvVar.FEATURE_SANDWICH, "false");
|
||||
}
|
||||
else {
|
||||
core.exportVariable(shared_environment_1.EnvVar.FEATURE_MULTI_LANGUAGE, "true");
|
||||
core.exportVariable(shared_environment_1.EnvVar.FEATURE_SANDWICH, "true");
|
||||
}
|
||||
}
|
||||
exports.enrichEnvironment = enrichEnvironment;
|
||||
//# sourceMappingURL=codeql.js.map
|
||||
File diff suppressed because one or more lines are too long
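The through-line of the codeql.js changes is that CODEQL_MINIMUM_VERSION moves from 2.6.3 to 2.8.5, so version flags that gate on 2.7.x (CODEQL_VERSION_NEW_TRACING, CODEQL_VERSION_TRACING_GLIBC_2_34) are always satisfied and can be deleted, while higher gates such as CODEQL_VERSION_GHES_PACK_DOWNLOAD (2.10.4) remain. A self-contained sketch of the comparison (the action itself does this through util.codeQlVersionAbove; this standalone compare only shows the arithmetic):

// Compare dotted versions numerically, component by component.
function versionAtLeast(version, minimum) {
    const a = version.split(".").map(Number);
    const b = minimum.split(".").map(Number);
    for (let i = 0; i < 3; i++) {
        if ((a[i] || 0) !== (b[i] || 0)) {
            return (a[i] || 0) > (b[i] || 0);
        }
    }
    return true; // equal versions
}
console.log(versionAtLeast("2.8.5", "2.7.0"));  // true: new tracing is always available
console.log(versionAtLeast("2.8.5", "2.7.3"));  // true: the glibc 2.34 fix is always present
console.log(versionAtLeast("2.8.5", "2.10.4")); // false: GHES pack download stays version-gated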
lib/debug-artifacts.js (generated, 18 changes)
|
|
@@ -74,7 +74,6 @@ async function uploadSarifDebugArtifact(config, outputDir) {
|
|||
}
|
||||
exports.uploadSarifDebugArtifact = uploadSarifDebugArtifact;
|
||||
async function uploadLogsDebugArtifact(config) {
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
let toUpload = [];
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||
|
|
@@ -83,21 +82,12 @@ async function uploadLogsDebugArtifact(config) {
|
|||
toUpload = toUpload.concat((0, util_1.listFolder)(logsDirectory));
|
||||
}
|
||||
}
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
|
||||
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
|
||||
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
|
||||
}
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
|
||||
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
|
||||
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
||||
const compoundBuildTracerLogDirectory = path.resolve(config.tempDir, "compound-build-tracer.log");
|
||||
if ((0, util_1.doesDirectoryExist)(compoundBuildTracerLogDirectory)) {
|
||||
await uploadDebugArtifacts([compoundBuildTracerLogDirectory], config.tempDir, config.debugArtifactName);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.uploadLogsDebugArtifact = uploadLogsDebugArtifact;
|
||||
/**
|
||||
|
|
|
|||
|
|
@@ -1 +1 @@
|
|||
{"version":3,"file":"debug-artifacts.js","sourceRoot":"","sources":["../src/debug-artifacts.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oDAAsC;AACtC,sDAA6B;AAC7B,8CAAsB;AAEtB,iDAAkD;AAClD,uCAA0C;AAC1C,qCAAiE;AAIjE,iCAMgB;AAEhB,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,OAAO,IAAI,CAAC,OAAO,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC;AAChD,CAAC;AAFD,kDAEC;AAEM,KAAK,UAAU,oBAAoB,CACxC,QAAkB,EAClB,OAAe,EACf,YAAoB;IAEpB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,OAAO;KACR;IACD,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,QAAQ,CAAC,CAAC;IAC1C,IAAI,MAAM,EAAE;QACV,IAAI;YACF,KAAK,MAAM,CAAC,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CACxC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAY,CAC9B,CAAC,IAAI,EAAE;gBACN,MAAM,IAAI,IAAI,SAAS,EAAE,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CACP,+HAA+H,CAChI,CAAC;SACH;KACF;IACD,MAAM,QAAQ,CAAC,MAAM,EAAE,CAAC,cAAc,CACpC,mBAAmB,CAAC,GAAG,YAAY,GAAG,MAAM,EAAE,CAAC,EAC/C,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAC5C,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CACxB,CAAC;AACJ,CAAC;AA3BD,oDA2BC;AAEM,KAAK,UAAU,wBAAwB,CAC5C,MAAc,EACd,SAAiB;IAEjB,IAAI,CAAC,IAAA,yBAAkB,EAAC,SAAS,CAAC,EAAE;QAClC,OAAO;KACR;IAED,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,SAAS,EAAE;QACnC,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,QAAQ,CAAC,CAAC;QAC3D,IAAI,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAC5B,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SACvC;KACF;IACD,MAAM,oBAAoB,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;AAC5E,CAAC;AAhBD,4DAgBC;AAEM,KAAK,UAAU,uBAAuB,CAAC,MAAc;IAC1D,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,IAAA,yBAAkB,EAAC,aAAa,CAAC,EAAE;YACrC,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,aAAa,CAAC,CAAC,CAAC;SACvD;KACF;IAED,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,8EAA8E;QAC9E,MAAM,iCAAiC,GAAG,IAAI,CAAC,OAAO,CACpD,MAAM,CAAC,UAAU,EACjB,KAAK,CACN,CAAC;QACF,IAAI,IAAA,yBAAkB,EAAC,iCAAiC,CAAC,EAAE;YACzD,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,iCAAiC,CAAC,CAAC,CAAC;SAC3E;KACF;IACD,MAAM,oBAAoB,CACxB,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;IAEF,sFAAsF;IACtF,IAAI,CAAC,CAAC,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,CAAC,EAAE;QACnE,MAAM,+BAA+B,GAAG,IAAI,CAAC,OAAO,CAClD,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;QACF,IAAI,IAAA,yBAAkB,EAAC,+BAA+B,CAAC,EAAE;YACvD,MAAM,oBAAoB,CACxB,CAAC,+BAA+B,CAAC,EACjC,MAAM,CAAC,OAAO,EACd,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;KACF;AACH,CAAC;AA1CD,0DA0CC;AAED;;;;GAIG;AACH,KAAK,UAAU,2BAA2B,CACxC,MAAc,EACd,QAAkB;IAElB,MAAM,YAAY,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC7D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CACrC,MAAM,CAAC,UAAU,EACjB,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,cAAc,CACtD,CAAC;IACF,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,2DAA2D,kBAAkB,KAAK,CAC1H,CAAC;IACF,qEAAqE;IACrE,IAAI,EAAE,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACrC,MAAM,IAAA,aAAG,EAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;KAChD;IACD,MAAM,GAAG,GAAG,IAAI,iBAAM,EAAE,CAAC;IACzB,GAAG,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC;IACjC,GAAG,CAAC,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IACjC,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,uBAAuB,CACpC,MAAc,EACd,QAAkB;IAElB,kDAAkD;IAClD,MAAM,kBAAkB,GAAG,MAAM,IAAA,eAAQ,EACvC,MAAM,EACN,QAAQ,EACR,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,EACjC,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,EAAE,CAC1C,CAAC;IACF,OAAO,kBAAkB,CAAC;AAC
5B,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,MAAc;IAEd,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI;YACF,IAAI,kBAA0B,CAAC;YAC/B,IAAI,CAAC,IAAA,uBAAa,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,EAAE;gBAC5C,kBAAkB,GAAG,MAAM,2BAA2B,CACpD,MAAM,EACN,QAAQ,CACT,CAAC;aACH;iBAAM;gBACL,kBAAkB,GAAG,MAAM,uBAAuB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;aACtE;YACD,MAAM,oBAAoB,CACxB,CAAC,kBAAkB,CAAC,EACpB,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CACP,8CAA8C,MAAM,CAAC,iBAAiB,IAAI,QAAQ,KAAK,KAAK,EAAE,CAC/F,CAAC;SACH;KACF;AACH,CAAC;AA1BD,8EA0BC"}
|
||||
{"version":3,"file":"debug-artifacts.js","sourceRoot":"","sources":["../src/debug-artifacts.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oDAAsC;AACtC,sDAA6B;AAC7B,8CAAsB;AAEtB,iDAAkD;AAClD,uCAA0C;AAC1C,qCAAqC;AAIrC,iCAKgB;AAEhB,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,OAAO,IAAI,CAAC,OAAO,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC;AAChD,CAAC;AAFD,kDAEC;AAEM,KAAK,UAAU,oBAAoB,CACxC,QAAkB,EAClB,OAAe,EACf,YAAoB;IAEpB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,OAAO;KACR;IACD,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,QAAQ,CAAC,CAAC;IAC1C,IAAI,MAAM,EAAE;QACV,IAAI;YACF,KAAK,MAAM,CAAC,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CACxC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAY,CAC9B,CAAC,IAAI,EAAE;gBACN,MAAM,IAAI,IAAI,SAAS,EAAE,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CACP,+HAA+H,CAChI,CAAC;SACH;KACF;IACD,MAAM,QAAQ,CAAC,MAAM,EAAE,CAAC,cAAc,CACpC,mBAAmB,CAAC,GAAG,YAAY,GAAG,MAAM,EAAE,CAAC,EAC/C,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAC5C,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CACxB,CAAC;AACJ,CAAC;AA3BD,oDA2BC;AAEM,KAAK,UAAU,wBAAwB,CAC5C,MAAc,EACd,SAAiB;IAEjB,IAAI,CAAC,IAAA,yBAAkB,EAAC,SAAS,CAAC,EAAE;QAClC,OAAO;KACR;IAED,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,SAAS,EAAE;QACnC,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,QAAQ,CAAC,CAAC;QAC3D,IAAI,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAC5B,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SACvC;KACF;IACD,MAAM,oBAAoB,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;AAC5E,CAAC;AAhBD,4DAgBC;AAEM,KAAK,UAAU,uBAAuB,CAAC,MAAc;IAC1D,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,IAAA,yBAAkB,EAAC,aAAa,CAAC,EAAE;YACrC,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,aAAa,CAAC,CAAC,CAAC;SACvD;KACF;IAED,8EAA8E;IAC9E,MAAM,iCAAiC,GAAG,IAAI,CAAC,OAAO,CACpD,MAAM,CAAC,UAAU,EACjB,KAAK,CACN,CAAC;IACF,IAAI,IAAA,yBAAkB,EAAC,iCAAiC,CAAC,EAAE;QACzD,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,iCAAiC,CAAC,CAAC,CAAC;KAC3E;IAED,MAAM,oBAAoB,CACxB,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;AACJ,CAAC;AAxBD,0DAwBC;AAED;;;;GAIG;AACH,KAAK,UAAU,2BAA2B,CACxC,MAAc,EACd,QAAkB;IAElB,MAAM,YAAY,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC7D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CACrC,MAAM,CAAC,UAAU,EACjB,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,cAAc,CACtD,CAAC;IACF,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,2DAA2D,kBAAkB,KAAK,CAC1H,CAAC;IACF,qEAAqE;IACrE,IAAI,EAAE,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACrC,MAAM,IAAA,aAAG,EAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;KAChD;IACD,MAAM,GAAG,GAAG,IAAI,iBAAM,EAAE,CAAC;IACzB,GAAG,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC;IACjC,GAAG,CAAC,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IACjC,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,uBAAuB,CACpC,MAAc,EACd,QAAkB;IAElB,kDAAkD;IAClD,MAAM,kBAAkB,GAAG,MAAM,IAAA,eAAQ,EACvC,MAAM,EACN,QAAQ,EACR,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,EACjC,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,EAAE,CAC1C,CAAC;IACF,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,MAAc;IAEd,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI;YACF,IAAI,kBAA0B,CAAC;YAC/B,IAAI,CAAC,IAAA,uBAAa,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,EAAE;gBAC5C,kBAAkB,GAAG,MAAM,2BAA2B,CACpD,MAAM,EACN,QAAQ,CACT,CAAC;aACH;iBAAM;gBACL,kBAAkB,GAAG,MAAM,uBAAuB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;aACtE;YACD,MAAM,oBAAoB,CACxB,CAAC,kBAAkB,CAAC,EACpB,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;QAAC,OAAO,KAAK,
EAAE;YACd,IAAI,CAAC,IAAI,CACP,8CAA8C,MAAM,CAAC,iBAAiB,IAAI,QAAQ,KAAK,KAAK,EAAE,CAC/F,CAAC;SACH;KACF;AACH,CAAC;AA1BD,8EA0BC"}
|
||||
|
|
@@ -1,6 +1,6 @@
|
|||
 {
-  "bundleVersion": "codeql-bundle-20230403",
-  "cliVersion": "2.12.6",
-  "priorBundleVersion": "codeql-bundle-20230317",
-  "priorCliVersion": "2.12.5"
+  "bundleVersion": "codeql-bundle-20230414",
+  "cliVersion": "2.13.0",
+  "priorBundleVersion": "codeql-bundle-20230403",
+  "priorCliVersion": "2.12.6"
 }
|
||||
|
|
|
|||
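The hunk above is the pinned-tools manifest: the default bundle moves from codeql-bundle-20230403 (CLI 2.12.6) to codeql-bundle-20230414 (CLI 2.13.0), and the previous pair is kept as the prior fields. A trivial consumer sketch (the src/defaults.json path is an assumption made for illustration):

const fs = require("fs");
// Read the pinned defaults; path assumed for illustration.
const defaults = JSON.parse(fs.readFileSync("src/defaults.json", "utf8"));
console.log(defaults.cliVersion);    // "2.13.0"
console.log(defaults.bundleVersion); // "codeql-bundle-20230414"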
lib/init-action-post-helper.js (generated, 2 changes)
|
|
@@ -52,7 +52,7 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) {
|
|||
if (!(await features.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
||||
return { upload_failed_run_skipped_because: "Feature disabled" };
|
||||
}
|
||||
const workflow = await (0, workflow_1.getWorkflow)();
|
||||
const workflow = await (0, workflow_1.getWorkflow)(logger);
|
||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||
const matrix = (0, util_1.parseMatrixInput)(actionsUtil.getRequiredInput("matrix"));
|
||||
const shouldUpload = (0, workflow_1.getUploadInputOrThrow)(workflow, jobName, matrix);
|
||||
|
|
|
|||
|
|
@@ -1 +1 @@
|
|||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAKgB;AAChB,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,MAAM,YAAY,GAAG,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC;IACtC,OAAO;QACL,uBAAuB,EAAE,YAAY,CAAC,OAAO;QAC7C,6BAA6B,EAAE,YAAY,CAAC,KAAK;KAClD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,MAAM,YAAY,GAAG,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,IACE,CAAC,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,QAAQ,CAClC,WAAW,CAAC,cAAc,CAAC,YAAY,CAAC,CACzC;QACD,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAC5E,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAEvC,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAE/C,kFAAkF;IAClF,IACE,YAAY,KAAK,SAAS;QAC1B,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EACpE;QACA,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KACvE;SAAM;QACL,8EAA8E;QAC9E,MAAM,MAAM,CAAC,yBAAyB,CACpC,YAAY,EACZ,SAAS,EACT,QAAQ,EACR,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;KACH;IAED,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IAEF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AArDD,kBAqDC"}
|
||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAKgB;AAChB,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,MAAM,YAAY,GAAG,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC;IACtC,OAAO;QACL,uBAAuB,EAAE,YAAY,CAAC,OAAO;QAC7C,6BAA6B,EAAE,YAAY,CAAC,KAAK;KAClD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,EAAC,MAAM,CAAC,CAAC;IAC3C,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,MAAM,YAAY,GAAG,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,IACE,CAAC,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,QAAQ,CAClC,WAAW,CAAC,cAAc,CAAC,YAAY,CAAC,CACzC;QACD,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAC5E,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAEvC,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAE/C,kFAAkF;IAClF,IACE,YAAY,KAAK,SAAS;QAC1B,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EACpE;QACA,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KACvE;SAAM;QACL,8EAA8E;QAC9E,MAAM,MAAM,CAAC,yBAAyB,CACpC,YAAY,EACZ,SAAS,EACT,QAAQ,EACR,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC;KACH;IAED,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IAEF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AArDD,kBAqDC"}
|
||||
lib/init-action.js (generated, 8 changes)
|
|
@@ -27,7 +27,6 @@ const path = __importStar(require("path"));
|
|||
const core = __importStar(require("@actions/core"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const init_1 = require("./init");
|
||||
const languages_1 = require("./languages");
|
||||
|
|
@@ -116,7 +115,7 @@ async function run() {
|
|||
const registriesInput = (0, actions_util_1.getOptionalInput)("registries");
|
||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||
try {
|
||||
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
||||
const workflowErrors = await (0, workflow_1.validateWorkflow)(logger);
|
||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||
return;
|
||||
}
|
||||
|
|
@@ -129,7 +128,6 @@ async function run() {
|
|||
toolsDownloadDurationMs = initCodeQLResult.toolsDownloadDurationMs;
|
||||
toolsVersion = initCodeQLResult.toolsVersion;
|
||||
toolsSource = initCodeQLResult.toolsSource;
|
||||
await (0, codeql_1.enrichEnvironment)(codeql);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), registriesInput, (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("config"), getTrapCachingEnabled(),
|
||||
// Debug mode is enabled if:
|
||||
// - The `init` Action is passed `debug: true`.
|
||||
|
|
@@ -178,10 +176,6 @@ async function run() {
|
|||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
||||
core.exportVariable(key, value);
|
||||
}
|
||||
if (process.platform === "win32" &&
|
||||
!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
||||
await (0, init_1.injectWindowsTracer)("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
|
||||
}
|
||||
}
|
||||
core.setOutput("codeql-path", config.codeQLCmd);
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
lib/init.js (generated, 126 changes)
|
|
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.installPythonDeps = exports.injectWindowsTracer = exports.runInit = exports.initConfig = exports.initCodeQL = exports.ToolsSource = void 0;
|
||||
exports.installPythonDeps = exports.runInit = exports.initConfig = exports.initCodeQL = exports.ToolsSource = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
|
|
@@ -33,7 +33,6 @@ const codeql_1 = require("./codeql");
|
|||
const configUtils = __importStar(require("./config-utils"));
|
||||
const tracer_config_1 = require("./tracer-config");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
var ToolsSource;
|
||||
(function (ToolsSource) {
|
||||
ToolsSource["Unknown"] = "UNKNOWN";
|
||||
|
|
@@ -60,35 +59,27 @@ exports.initConfig = initConfig;
|
|||
async function runInit(codeql, config, sourceRoot, processName, registriesInput, features, apiDetails, logger) {
|
||||
fs.mkdirSync(config.dbLocation, { recursive: true });
|
||||
try {
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// When parsing the codeql config in the CLI, we have not yet created the qlconfig file.
|
||||
// So, create it now.
|
||||
// If we are parsing the config file in the Action, then the qlconfig file was already created
|
||||
// before the `pack download` command was invoked. It is not required for the init command.
|
||||
let registriesAuthTokens;
|
||||
let qlconfigFile;
|
||||
if (await util.useCodeScanningConfigInCli(codeql, features)) {
|
||||
({ registriesAuthTokens, qlconfigFile } =
|
||||
await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
|
||||
}
|
||||
await configUtils.wrapEnvironment({
|
||||
GITHUB_TOKEN: apiDetails.auth,
|
||||
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
||||
},
|
||||
// Init a database cluster
|
||||
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger));
|
||||
}
|
||||
else {
|
||||
for (const language of config.languages) {
|
||||
// Init language database
|
||||
await codeql.databaseInit(util.getCodeQLDatabasePath(config, language), language, sourceRoot);
|
||||
}
|
||||
// When parsing the codeql config in the CLI, we have not yet created the qlconfig file.
|
||||
// So, create it now.
|
||||
// If we are parsing the config file in the Action, then the qlconfig file was already created
|
||||
// before the `pack download` command was invoked. It is not required for the init command.
|
||||
let registriesAuthTokens;
|
||||
let qlconfigFile;
|
||||
if (await util.useCodeScanningConfigInCli(codeql, features)) {
|
||||
({ registriesAuthTokens, qlconfigFile } =
|
||||
await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
|
||||
}
|
||||
await configUtils.wrapEnvironment({
|
||||
GITHUB_TOKEN: apiDetails.auth,
|
||||
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
||||
},
|
||||
// Init a database cluster
|
||||
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger));
|
||||
}
|
||||
catch (e) {
|
||||
throw processError(e);
|
||||
}
|
||||
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql);
|
||||
return await (0, tracer_config_1.getCombinedTracerConfig)(config);
|
||||
}
|
||||
exports.runInit = runInit;
|
||||
/**
|
||||
|
|
@@ -119,89 +110,6 @@ function processError(e) {
|
|||
}
|
||||
return e;
|
||||
}
|
||||
// Runs a powershell script to inject the tracer into a parent process
|
||||
// so it can tracer future processes, hopefully including the build process.
|
||||
// If processName is given then injects into the nearest parent process with
|
||||
// this name, otherwise uses the processLevel-th parent if defined, otherwise
|
||||
// defaults to the 3rd parent as a rough guess.
|
||||
async function injectWindowsTracer(processName, processLevel, config, codeql, tracerConfig) {
|
||||
let script;
|
||||
if (processName !== undefined) {
|
||||
script = `
|
||||
Param(
|
||||
[Parameter(Position=0)]
|
||||
[String]
|
||||
$tracer
|
||||
)
|
||||
|
||||
$id = $PID
|
||||
while ($true) {
|
||||
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
|
||||
Write-Host "Found process: $p"
|
||||
if ($p -eq $null) {
|
||||
throw "Could not determine ${processName} process"
|
||||
}
|
||||
if ($p[0].Name -eq "${processName}") {
|
||||
Break
|
||||
} else {
|
||||
$id = $p[0].ParentProcessId
|
||||
}
|
||||
}
|
||||
Write-Host "Final process: $p"
|
||||
|
||||
Invoke-Expression "&$tracer --inject=$id"`;
|
||||
}
|
||||
else {
|
||||
// If the level is not defined then guess at the 3rd parent process.
|
||||
// This won't be correct in every setting but it should be enough in most settings,
|
||||
// and overestimating is likely better in this situation so we definitely trace
|
||||
// what we want, though this does run the risk of interfering with future CI jobs.
|
||||
// Note that the default of 3 doesn't work on github actions, so we include a
|
||||
// special case in the script that checks for Runner.Worker.exe so we can still work
|
||||
// on actions if the runner is invoked there.
|
||||
processLevel = processLevel || 3;
|
||||
script = `
|
||||
Param(
|
||||
[Parameter(Position=0)]
|
||||
[String]
|
||||
$tracer
|
||||
)
|
||||
|
||||
$id = $PID
|
||||
for ($i = 0; $i -le ${processLevel}; $i++) {
|
||||
$p = Get-CimInstance -Class Win32_Process -Filter "ProcessId = $id"
|
||||
Write-Host "Parent process \${i}: $p"
|
||||
if ($p -eq $null) {
|
||||
throw "Process tree ended before reaching required level"
|
||||
}
|
||||
# Special case just in case the runner is used on actions
|
||||
if ($p[0].Name -eq "Runner.Worker.exe") {
|
||||
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
|
||||
Write-Host "Aborting search early and using process: $p"
|
||||
Break
|
||||
} elseif ($p[0].Name -eq "Agent.Worker.exe") {
|
||||
Write-Host "Found Agent.Worker.exe process which means we are running on Azure Pipelines"
|
||||
Write-Host "Aborting search early and using process: $p"
|
||||
Break
|
||||
} else {
|
||||
$id = $p[0].ParentProcessId
|
||||
}
|
||||
}
|
||||
Write-Host "Final process: $p"
|
||||
|
||||
Invoke-Expression "&$tracer --inject=$id"`;
|
||||
}
|
||||
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
|
||||
fs.writeFileSync(injectTracerPath, script);
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
|
||||
"-ExecutionPolicy",
|
||||
"Bypass",
|
||||
"-file",
|
||||
injectTracerPath,
|
||||
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
|
||||
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
|
||||
}
|
||||
exports.injectWindowsTracer = injectWindowsTracer;
|
||||
async function installPythonDeps(codeql, logger) {
|
||||
logger.startGroup("Setup Python dependencies");
|
||||
const scriptsFolder = path.resolve(__dirname, "../python-setup");
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
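In the init.js hunk above, runInit now always initializes a database cluster and runs it inside configUtils.wrapEnvironment, which temporarily exposes GITHUB_TOKEN and CODEQL_REGISTRIES_AUTH to the CLI. A generic sketch of that wrap-and-restore pattern (illustrative only, not necessarily how the repository's wrapEnvironment is implemented):

// Set the given variables, run the callback, then restore the previous values.
async function withEnvironment(vars, body) {
    const saved = {};
    for (const [key, value] of Object.entries(vars)) {
        saved[key] = process.env[key];
        if (value === undefined) {
            delete process.env[key];
        } else {
            process.env[key] = value;
        }
    }
    try {
        return await body();
    } finally {
        for (const [key, previous] of Object.entries(saved)) {
            if (previous === undefined) {
                delete process.env[key];
            } else {
                process.env[key] = previous;
            }
        }
    }
}
// Usage shaped like the hunk above (identifiers are placeholders):
// await withEnvironment(
//     { GITHUB_TOKEN: apiDetails.auth, CODEQL_REGISTRIES_AUTH: registriesAuthTokens },
//     async () => codeql.databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger)
// );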
lib/tracer-config.js (generated, 161 changes)
|
|
@@ -23,20 +23,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getCombinedTracerConfig = exports.concatTracerConfigs = exports.getTracerConfigForLanguage = exports.getTracerConfigForCluster = exports.endTracingForCluster = void 0;
|
||||
exports.getCombinedTracerConfig = exports.getTracerConfigForCluster = exports.endTracingForCluster = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const languages_1 = require("./languages");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
const CRITICAL_TRACER_VARS = new Set([
|
||||
"SEMMLE_PRELOAD_libtrace",
|
||||
"SEMMLE_RUNNER",
|
||||
"SEMMLE_COPY_EXECUTABLES_ROOT",
|
||||
"SEMMLE_DEPTRACE_SOCKET",
|
||||
"SEMMLE_JAVA_TOOL_OPTIONS",
|
||||
]);
|
||||
async function endTracingForCluster(config) {
|
||||
// If there are no traced languages, we don't need to do anything.
|
||||
if (!config.languages.some((l) => (0, languages_1.isTracedLanguage)(l)))
|
||||
|
|
@@ -64,162 +54,17 @@ exports.endTracingForCluster = endTracingForCluster;
|
|||
async function getTracerConfigForCluster(config) {
|
||||
const tracingEnvVariables = JSON.parse(fs.readFileSync(path.resolve(config.dbLocation, "temp/tracingEnvironment/start-tracing.json"), "utf8"));
|
||||
return {
|
||||
spec: tracingEnvVariables["ODASA_TRACER_CONFIGURATION"],
|
||||
env: tracingEnvVariables,
|
||||
};
|
||||
}
|
||||
exports.getTracerConfigForCluster = getTracerConfigForCluster;
|
||||
async function getTracerConfigForLanguage(codeql, config, language) {
|
||||
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config, language));
|
||||
const spec = env["ODASA_TRACER_CONFIGURATION"];
|
||||
const info = { spec, env: {} };
|
||||
// Extract critical tracer variables from the environment
|
||||
for (const entry of Object.entries(env)) {
|
||||
const key = entry[0];
|
||||
const value = entry[1];
|
||||
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
|
||||
if (key === "ODASA_TRACER_CONFIGURATION") {
|
||||
continue;
|
||||
}
|
||||
// skip undefined values
|
||||
if (typeof value === "undefined") {
|
||||
continue;
|
||||
}
|
||||
// Keep variables that do not exist in current environment. In addition always keep
|
||||
// critical and CODEQL_ variables
|
||||
if (typeof process.env[key] === "undefined" ||
|
||||
CRITICAL_TRACER_VARS.has(key) ||
|
||||
key.startsWith("CODEQL_")) {
|
||||
info.env[key] = value;
|
||||
}
|
||||
}
|
||||
return info;
|
||||
}
|
||||
exports.getTracerConfigForLanguage = getTracerConfigForLanguage;
|
||||
function concatTracerConfigs(tracerConfigs, config, writeBothEnvironments = false) {
|
||||
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
|
||||
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
|
||||
// Merge the environments
|
||||
const env = {};
|
||||
let copyExecutables = false;
|
||||
let envSize = 0;
|
||||
for (const v of Object.values(tracerConfigs)) {
|
||||
for (const e of Object.entries(v.env)) {
|
||||
const name = e[0];
|
||||
const value = e[1];
|
||||
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
|
||||
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
|
||||
copyExecutables = true;
|
||||
}
|
||||
else if (name in env) {
|
||||
if (env[name] !== value) {
|
||||
throw Error(`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
env[name] = value;
|
||||
envSize += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Concatenate spec files into a new spec file
|
||||
const languages = Object.keys(tracerConfigs);
|
||||
const cppIndex = languages.indexOf("cpp");
|
||||
// Make sure cpp is the last language, if it's present since it must be concatenated last
|
||||
if (cppIndex !== -1) {
|
||||
const lastLang = languages[languages.length - 1];
|
||||
languages[languages.length - 1] = languages[cppIndex];
|
||||
languages[cppIndex] = lastLang;
|
||||
}
|
||||
const totalLines = [];
|
||||
let totalCount = 0;
|
||||
for (const lang of languages) {
|
||||
const lines = fs
|
||||
.readFileSync(tracerConfigs[lang].spec, "utf8")
|
||||
.split(/\r?\n/);
|
||||
const count = parseInt(lines[1], 10);
|
||||
totalCount += count;
|
||||
totalLines.push(...lines.slice(2));
|
||||
}
|
||||
const newLogFilePath = path.resolve(config.tempDir, "compound-build-tracer.log");
|
||||
const spec = path.resolve(config.tempDir, "compound-spec");
|
||||
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
|
||||
const newSpecContent = [
|
||||
newLogFilePath,
|
||||
totalCount.toString(10),
|
||||
...totalLines,
|
||||
];
|
||||
if (copyExecutables) {
|
||||
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
|
||||
envSize += 1;
|
||||
}
|
||||
fs.writeFileSync(spec, newSpecContent.join("\n"));
|
||||
if (writeBothEnvironments || process.platform !== "win32") {
|
||||
// Prepare the content of the compound environment file on Unix
|
||||
let buffer = Buffer.alloc(4);
|
||||
buffer.writeInt32LE(envSize, 0);
|
||||
for (const e of Object.entries(env)) {
|
||||
const key = e[0];
|
||||
const value = e[1];
|
||||
const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
|
||||
const sizeBuffer = Buffer.alloc(4);
|
||||
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
|
||||
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
|
||||
}
|
||||
// Write the compound environment for Unix
|
||||
const envPath = `${spec}.environment`;
|
||||
fs.writeFileSync(envPath, buffer);
|
||||
}
|
||||
if (writeBothEnvironments || process.platform === "win32") {
|
||||
// Prepare the content of the compound environment file on Windows
|
||||
let bufferWindows = Buffer.alloc(0);
|
||||
let length = 0;
|
||||
for (const e of Object.entries(env)) {
|
||||
const key = e[0];
|
||||
const value = e[1];
|
||||
const string = `${key}=${value}\0`;
|
||||
length += string.length;
|
||||
const lineBuffer = Buffer.from(string, "utf16le");
|
||||
bufferWindows = Buffer.concat([bufferWindows, lineBuffer]);
|
||||
}
|
||||
const sizeBuffer = Buffer.alloc(4);
|
||||
sizeBuffer.writeInt32LE(length + 1, 0); // Add one for trailing null character marking end
|
||||
const trailingNull = Buffer.from(`\0`, "utf16le");
|
||||
bufferWindows = Buffer.concat([sizeBuffer, bufferWindows, trailingNull]);
|
||||
// Write the compound environment for Windows
|
||||
const envPathWindows = `${spec}.win32env`;
|
||||
fs.writeFileSync(envPathWindows, bufferWindows);
|
||||
}
|
||||
return { env, spec };
|
||||
}
|
||||
exports.concatTracerConfigs = concatTracerConfigs;
|
||||
async function getCombinedTracerConfig(config, codeql) {
|
||||
async function getCombinedTracerConfig(config) {
|
||||
// Abort if there are no traced languages as there's nothing to do
|
||||
const tracedLanguages = config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l));
|
||||
if (tracedLanguages.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
let mainTracerConfig;
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
mainTracerConfig = await getTracerConfigForCluster(config);
|
||||
}
|
||||
else {
|
||||
// Get all the tracer configs and combine them together
|
||||
const tracedLanguageConfigs = {};
|
||||
for (const language of tracedLanguages) {
|
||||
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
|
||||
}
|
||||
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
|
||||
// Add a couple more variables
|
||||
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
|
||||
const codeQLDir = path.dirname(codeql.getPath());
|
||||
if (process.platform === "darwin") {
|
||||
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
|
||||
}
|
||||
else if (process.platform !== "win32") {
|
||||
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
|
||||
}
|
||||
}
|
||||
const mainTracerConfig = await getTracerConfigForCluster(config);
|
||||
// On macos it's necessary to prefix the build command with the runner executable
|
||||
// on order to trace when System Integrity Protection is enabled.
|
||||
// The executable also exists and works for other platforms so we output this env
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
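The removed concatTracerConfigs above wrote the legacy compound environment files whose binary layout the tracer-config.test.js hunks that follow (also deleted in this merge) document: for Unix, an int32 entry count followed by, for each variable, an int32 byte length and the UTF-8 string "key=value\0". A small self-contained sketch that reproduces the 28-byte example the test asserted (env vars a=a and foo=bar_baz):

// Build the legacy Unix-format compound environment buffer.
function buildUnixEnvBuffer(env) {
    const entries = Object.entries(env);
    let buffer = Buffer.alloc(4);
    buffer.writeInt32LE(entries.length, 0); // number of env vars
    for (const [key, value] of entries) {
        const line = Buffer.from(`${key}=${value}\0`, "utf8");
        const size = Buffer.alloc(4);
        size.writeInt32LE(line.length, 0); // byte length of this entry
        buffer = Buffer.concat([buffer, size, line]);
    }
    return buffer;
}
const buf = buildUnixEnvBuffer({ a: "a", foo: "bar_baz" });
console.log(buf.length);                  // 28 = 4 + (4 + 4) + (4 + 12)
console.log(buf.readInt32LE(0));          // 2 entries
console.log(buf.toString("utf8", 8, 12)); // "a=a\0"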
lib/tracer-config.test.js (generated, 254 changes)
|
|
@@ -29,7 +29,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const languages_1 = require("./languages");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
|
|
@@ -56,267 +55,35 @@ function getTestConfig(tmpDir) {
|
|||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
}
|
||||
// A very minimal setup
|
||||
(0, ava_1.default)("getTracerConfigForLanguage - minimal setup", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getTracerEnv() {
|
||||
return {
|
||||
ODASA_TRACER_CONFIGURATION: "abc",
|
||||
foo: "bar",
|
||||
};
|
||||
},
|
||||
});
|
||||
const result = await (0, tracer_config_1.getTracerConfigForLanguage)(codeQL, config, languages_1.Language.javascript);
|
||||
t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
|
||||
});
|
||||
});
|
||||
// Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
|
||||
(0, ava_1.default)("getTracerConfigForLanguage - existing / critical vars", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
// Set up some variables in the environment
|
||||
process.env["foo"] = "abc";
|
||||
process.env["SEMMLE_PRELOAD_libtrace"] = "abc";
|
||||
process.env["SEMMLE_RUNNER"] = "abc";
|
||||
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
|
||||
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
|
||||
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
|
||||
process.env["CODEQL_VAR"] = "abc";
|
||||
// Now CodeQL returns all these variables, and one more, with different values
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getTracerEnv() {
|
||||
return {
|
||||
ODASA_TRACER_CONFIGURATION: "abc",
|
||||
foo: "bar",
|
||||
baz: "qux",
|
||||
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
|
||||
SEMMLE_RUNNER: "SEMMLE_RUNNER",
|
||||
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
|
||||
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
|
||||
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
|
||||
CODEQL_VAR: "CODEQL_VAR",
|
||||
};
|
||||
},
|
||||
});
|
||||
const result = await (0, tracer_config_1.getTracerConfigForLanguage)(codeQL, config, languages_1.Language.javascript);
|
||||
t.deepEqual(result, {
|
||||
spec: "abc",
|
||||
env: {
|
||||
// Should contain all variables except 'foo', because that already existed in the
|
||||
// environment with a different value, and is not deemed a "critical" variable.
|
||||
baz: "qux",
|
||||
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
|
||||
SEMMLE_RUNNER: "SEMMLE_RUNNER",
|
||||
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
|
||||
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
|
||||
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
|
||||
CODEQL_VAR: "CODEQL_VAR",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - minimal configs correctly combined", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec1 = path.join(tmpDir, "spec1");
|
||||
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
|
||||
const tc1 = {
|
||||
spec: spec1,
|
||||
env: {
|
||||
a: "a",
|
||||
b: "b",
|
||||
},
|
||||
};
|
||||
const spec2 = path.join(tmpDir, "spec2");
|
||||
fs.writeFileSync(spec2, "foo.log\n1\nghi");
|
||||
const tc2 = {
|
||||
spec: spec2,
|
||||
env: {
|
||||
c: "c",
|
||||
},
|
||||
};
|
||||
const result = (0, tracer_config_1.concatTracerConfigs)({ javascript: tc1, python: tc2 }, config);
|
||||
t.deepEqual(result, {
|
||||
spec: path.join(tmpDir, "compound-spec"),
|
||||
env: {
|
||||
a: "a",
|
||||
b: "b",
|
||||
c: "c",
|
||||
},
|
||||
});
|
||||
t.true(fs.existsSync(result.spec));
|
||||
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - conflicting env vars", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec = path.join(tmpDir, "spec");
|
||||
fs.writeFileSync(spec, "foo.log\n0");
|
||||
// Ok if env vars have the same name and the same value
|
||||
t.deepEqual((0, tracer_config_1.concatTracerConfigs)({
|
||||
javascript: { spec, env: { a: "a", b: "b" } },
|
||||
python: { spec, env: { b: "b", c: "c" } },
|
||||
}, config).env, {
|
||||
a: "a",
|
||||
b: "b",
|
||||
c: "c",
|
||||
});
|
||||
// Throws if env vars have same name but different values
|
||||
const e = t.throws(() => (0, tracer_config_1.concatTracerConfigs)({
|
||||
javascript: { spec, env: { a: "a", b: "b" } },
|
||||
python: { spec, env: { b: "c" } },
|
||||
}, config));
|
||||
// If e is undefined, then the previous assertion will fail.
|
||||
if (e !== undefined) {
|
||||
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec1 = path.join(tmpDir, "spec1");
|
||||
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
|
||||
const tc1 = {
|
||||
spec: spec1,
|
||||
env: {
|
||||
a: "a",
|
||||
b: "b",
|
||||
},
|
||||
};
|
||||
const spec2 = path.join(tmpDir, "spec2");
|
||||
fs.writeFileSync(spec2, "foo.log\n1\nghi");
|
||||
const tc2 = {
|
||||
spec: spec2,
|
||||
env: {
|
||||
c: "c",
|
||||
},
|
||||
};
|
||||
const result = (0, tracer_config_1.concatTracerConfigs)({ cpp: tc1, python: tc2 }, config);
|
||||
t.deepEqual(result, {
|
||||
spec: path.join(tmpDir, "compound-spec"),
|
||||
env: {
|
||||
a: "a",
|
||||
b: "b",
|
||||
c: "c",
|
||||
},
|
||||
});
|
||||
t.true(fs.existsSync(result.spec));
|
||||
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec = path.join(tmpDir, "spec");
|
||||
fs.writeFileSync(spec, "foo.log\n0");
|
||||
const result = (0, tracer_config_1.concatTracerConfigs)({
|
||||
javascript: { spec, env: { a: "a", b: "b" } },
|
||||
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
|
||||
}, config);
|
||||
t.deepEqual(result.env, {
|
||||
a: "a",
|
||||
b: "b",
|
||||
SEMMLE_COPY_EXECUTABLES_ROOT: path.join(tmpDir, "compound-temp"),
|
||||
});
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - compound environment file is created correctly", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec1 = path.join(tmpDir, "spec1");
|
||||
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
|
||||
const tc1 = {
|
||||
spec: spec1,
|
||||
env: {
|
||||
a: "a",
|
||||
},
|
||||
};
|
||||
const spec2 = path.join(tmpDir, "spec2");
|
||||
fs.writeFileSync(spec2, "foo.log\n1\nghi");
|
||||
const tc2 = {
|
||||
spec: spec2,
|
||||
env: {
|
||||
foo: "bar_baz",
|
||||
},
|
||||
};
|
||||
const result = (0, tracer_config_1.concatTracerConfigs)({ javascript: tc1, python: tc2 }, config, true);
|
||||
// Check binary contents for the Unix file
|
||||
const envPath = `${result.spec}.environment`;
|
||||
t.true(fs.existsSync(envPath));
|
||||
const buffer = fs.readFileSync(envPath);
|
||||
t.deepEqual(buffer.length, 28);
|
||||
t.deepEqual(buffer.readInt32LE(0), 2); // number of env vars
|
||||
t.deepEqual(buffer.readInt32LE(4), 4); // length of env var definition
|
||||
t.deepEqual(buffer.toString("utf8", 8, 12), "a=a\0"); // [key]=[value]\0
|
||||
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
|
||||
t.deepEqual(buffer.toString("utf8", 16, 28), "foo=bar_baz\0"); // [key]=[value]\0
|
||||
// Check binary contents for the Windows file
|
||||
const envPathWindows = `${result.spec}.win32env`;
|
||||
t.true(fs.existsSync(envPathWindows));
|
||||
const bufferWindows = fs.readFileSync(envPathWindows);
|
||||
t.deepEqual(bufferWindows.length, 38);
|
||||
t.deepEqual(bufferWindows.readInt32LE(0), 4 + 12 + 1); // number of tchars to represent the environment
|
||||
t.deepEqual(bufferWindows.toString("utf16le", 4, 12), "a=a\0"); // [key]=[value]\0
|
||||
t.deepEqual(bufferWindows.toString("utf16le", 12, 36), "foo=bar_baz\0"); // [key]=[value]\0
|
||||
t.deepEqual(bufferWindows.toString("utf16le", 36, 38), "\0"); // trailing null character
|
||||
});
|
||||
});
|
||||
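The assertions above pin down the binary layout of the compound .environment file: a little-endian Int32 count of variables, then, for each variable, an Int32 byte length followed by a NUL-terminated key=value entry (the .win32env variant stores a UTF-16 character count and a trailing NUL instead). The helper below is a minimal sketch of producing the Unix layout with plain Node.js; writeUnixEnvironmentFile is a hypothetical name, not the action's implementation.

// Hypothetical sketch: serialize an env object in the layout asserted by the test above.
const fs = require("fs");

function writeUnixEnvironmentFile(envPath, env) {
    const chunks = [Buffer.alloc(4)];
    const entries = Object.entries(env).map(([key, value]) => Buffer.from(`${key}=${value}\0`, "utf8"));
    chunks[0].writeInt32LE(entries.length, 0); // number of env vars
    for (const entry of entries) {
        const lengthPrefix = Buffer.alloc(4);
        lengthPrefix.writeInt32LE(entry.length, 0); // byte length of "key=value\0"
        chunks.push(lengthPrefix, entry);
    }
    fs.writeFileSync(envPath, Buffer.concat(chunks));
}

// For { a: "a", foo: "bar_baz" } this yields the 28-byte file checked above:
// 4 (count) + 4 + 4 ("a=a\0") + 4 + 12 ("foo=bar_baz\0").
writeUnixEnvironmentFile("/tmp/compound-spec.environment", { a: "a", foo: "bar_baz" });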
(0, ava_1.default)("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
// No traced languages
|
||||
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getTracerEnv() {
|
||||
return {
|
||||
ODASA_TRACER_CONFIGURATION: "abc",
|
||||
CODEQL_DIST: "/",
|
||||
foo: "bar",
|
||||
};
|
||||
},
|
||||
});
|
||||
t.deepEqual(await (0, tracer_config_1.getCombinedTracerConfig)(config, codeQL), undefined);
|
||||
t.deepEqual(await (0, tracer_config_1.getCombinedTracerConfig)(config), undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("getCombinedTracerConfig - valid spec file", async (t) => {
|
||||
(0, ava_1.default)("getCombinedTracerConfig - with start-tracing.json environment file", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
const config = getTestConfig(tmpDir);
|
||||
const spec = path.join(tmpDir, "spec");
|
||||
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
|
||||
const bundlePath = path.join(tmpDir, "bundle");
|
||||
const codeqlPlatform = process.platform === "win32"
|
||||
? "win64"
|
||||
: process.platform === "darwin"
|
||||
? "osx64"
|
||||
: "linux64";
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getTracerEnv() {
|
||||
return {
|
||||
ODASA_TRACER_CONFIGURATION: spec,
|
||||
CODEQL_DIST: bundlePath,
|
||||
CODEQL_PLATFORM: codeqlPlatform,
|
||||
foo: "bar",
|
||||
};
|
||||
},
|
||||
});
|
||||
const result = await (0, tracer_config_1.getCombinedTracerConfig)(config, codeQL);
|
||||
t.notDeepEqual(result, undefined);
|
||||
const expectedEnv = {
|
||||
const startTracingEnv = {
|
||||
foo: "bar",
|
||||
CODEQL_DIST: bundlePath,
|
||||
CODEQL_PLATFORM: codeqlPlatform,
|
||||
ODASA_TRACER_CONFIGURATION: result.spec,
|
||||
};
|
||||
if (process.platform === "darwin") {
|
||||
expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(path.dirname(codeQL.getPath()), "tools", "osx64", "libtrace.dylib");
|
||||
}
|
||||
else if (process.platform !== "win32") {
|
||||
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
|
||||
}
|
||||
const tracingEnvironmentDir = path.join(config.dbLocation, "temp", "tracingEnvironment");
|
||||
fs.mkdirSync(tracingEnvironmentDir, { recursive: true });
|
||||
const startTracingJson = path.join(tracingEnvironmentDir, "start-tracing.json");
|
||||
fs.writeFileSync(startTracingJson, JSON.stringify(startTracingEnv));
|
||||
const result = await (0, tracer_config_1.getCombinedTracerConfig)(config);
|
||||
t.notDeepEqual(result, undefined);
|
||||
const expectedEnv = startTracingEnv;
|
||||
if (process.platform === "win32") {
|
||||
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/win64/runner.exe");
|
||||
}
|
||||
|
|
@ -327,7 +94,6 @@ function getTestConfig(tmpDir) {
|
|||
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/linux64/runner");
|
||||
}
|
||||
t.deepEqual(result, {
|
||||
spec: path.join(tmpDir, "compound-spec"),
|
||||
env: expectedEnv,
|
||||
});
|
||||
});
|
||||
|
|
|
|||
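The rewritten test above exercises a flow in which the tracer environment is read from a start-tracing.json file under <dbLocation>/temp/tracingEnvironment rather than computed via a CodeQL object. Below is a minimal sketch of reading such a file, assuming the flat JSON map the test writes; readStartTracingEnv is a hypothetical helper, not part of the action's API.

// Hypothetical sketch, mirroring the path layout used by the test above.
const fs = require("fs");
const path = require("path");

function readStartTracingEnv(dbLocation) {
    const startTracingJson = path.join(dbLocation, "temp", "tracingEnvironment", "start-tracing.json");
    if (!fs.existsSync(startTracingJson)) {
        return undefined; // tracing was not initialized for this database
    }
    // The file is a flat JSON object mapping environment variable names to values.
    return JSON.parse(fs.readFileSync(startTracingJson, "utf8"));
}

// Example: const env = readStartTracingEnv(config.dbLocation);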
File diff suppressed because one or more lines are too long
9
lib/upload-lib.js
generated
@@ -134,7 +134,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
    return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
    return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), workflow.getWorkflowRunAttempt(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
}
exports.uploadFromActions = uploadFromActions;
function getSarifFilePaths(sarifPath) {
@@ -197,7 +197,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
exports.validateSarifFileSchema = validateSarifFileSchema;
// buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
    const payloadObj = {
        commit_oid: commitOid,
        ref,
@@ -205,6 +205,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
        analysis_name: analysisName,
        sarif: zippedSarif,
        workflow_run_id: workflowRunID,
        workflow_run_attempt: workflowRunAttempt,
        checkout_uri: checkoutURI,
        environment,
        started_at: process.env[shared_environment_1.CODEQL_WORKFLOW_STARTED_AT],
@@ -235,7 +236,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
exports.buildPayload = buildPayload;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, logger) {
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, workflowRunAttempt, sourceRoot, environment, logger) {
    logger.startGroup("Uploading results");
    logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
    // Validate that the files we were asked to upload are all valid SARIF files
@@ -252,7 +253,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
    const sarifPayload = JSON.stringify(sarif);
    const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
    const checkoutURI = (0, file_url_1.default)(sourceRoot);
    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
    // Log some useful debug info about the info
    const rawUploadSizeBytes = sarifPayload.length;
    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
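uploadFromActions, buildPayload, and uploadFiles now thread a workflowRunAttempt argument through to the upload payload alongside the existing workflowRunID. The fragment below is an illustrative sketch of the new call shape; the argument values are placeholders modeled on the tests that follow, and it assumes the compiled upload-lib and workflow modules are importable from the same directory.

// Illustrative sketch of the new buildPayload argument order; values are placeholders.
const uploadLib = require("./upload-lib");
const workflow = require("./workflow");

const payload = uploadLib.buildPayload(
    "commit",                         // commitOid
    "refs/heads/main",                // ref
    "key",                            // analysisKey
    undefined,                        // analysisName
    "",                               // gzipped, base64-encoded SARIF
    workflow.getWorkflowRunID(),      // workflow_run_id
    workflow.getWorkflowRunAttempt(), // workflow_run_attempt (the new field)
    "/opt/src",                       // checkoutURI
    undefined,                        // environment
    ["CodeQL"],                       // toolNames
    undefined);                       // mergeBaseCommitOid
// payload.workflow_run_attempt is now included in the upload sent to the code scanning API.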
File diff suppressed because one or more lines are too long
6
lib/upload-lib.test.js
generated
@@ -48,7 +48,7 @@ ava_1.default.beforeEach(() => {
});
(0, ava_1.default)("validate correct payload used for push, PR merge commit, and PR head", async (t) => {
    process.env["GITHUB_EVENT_NAME"] = "push";
    const pushPayload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    const pushPayload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", 1234, 1, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    // Not triggered by a pull request
    t.falsy(pushPayload.base_ref);
    t.falsy(pushPayload.base_sha);
@@ -56,11 +56,11 @@ ava_1.default.beforeEach(() => {
    process.env["GITHUB_SHA"] = "commit";
    process.env["GITHUB_BASE_REF"] = "master";
    process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
    const prMergePayload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    const prMergePayload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", 1234, 1, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    // Uploads for a merge commit use the merge base
    t.deepEqual(prMergePayload.base_ref, "refs/heads/master");
    t.deepEqual(prMergePayload.base_sha, "mergeBaseCommit");
    const prHeadPayload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    const prHeadPayload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", 1234, 1, "/opt/src", undefined, ["CodeQL", "eslint"], "mergeBaseCommit");
    // Uploads for the head use the PR base
    t.deepEqual(prHeadPayload.base_ref, "refs/heads/master");
    t.deepEqual(prHeadPayload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
File diff suppressed because one or more lines are too long
8
lib/util.js
generated
@@ -337,9 +337,11 @@ exports.assertNever = assertNever;
 * knowing what version of CodeQL we're running.
 */
function initializeEnvironment(version) {
    core.exportVariable(shared_environment_1.EnvVar.VERSION, version);
    core.exportVariable(shared_environment_1.EnvVar.FEATURE_SARIF_COMBINE, "true");
    core.exportVariable(shared_environment_1.EnvVar.FEATURE_WILL_UPLOAD, "true");
    core.exportVariable(String(shared_environment_1.EnvVar.FEATURE_MULTI_LANGUAGE), "false");
    core.exportVariable(String(shared_environment_1.EnvVar.FEATURE_SANDWICH), "false");
    core.exportVariable(String(shared_environment_1.EnvVar.FEATURE_SARIF_COMBINE), "true");
    core.exportVariable(String(shared_environment_1.EnvVar.FEATURE_WILL_UPLOAD), "true");
    core.exportVariable(String(shared_environment_1.EnvVar.VERSION), version);
}
exports.initializeEnvironment = initializeEnvironment;
/**
File diff suppressed because one or more lines are too long
75
lib/workflow.js
generated
@@ -22,10 +22,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunAttempt = exports.getWorkflowRunID = exports.getWorkflowRelativePath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const core = __importStar(require("@actions/core"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
@@ -157,10 +161,10 @@ function getWorkflowErrors(doc) {
    return errors;
}
exports.getWorkflowErrors = getWorkflowErrors;
async function validateWorkflow() {
async function validateWorkflow(logger) {
    let workflow;
    try {
        workflow = await getWorkflow();
        workflow = await getWorkflow(logger);
    }
    catch (e) {
        return `error: getWorkflow() failed: ${String(e)}`;
@@ -198,25 +202,37 @@ function formatWorkflowCause(errors) {
    return errors.map((e) => e.code).join(",");
}
exports.formatWorkflowCause = formatWorkflowCause;
async function getWorkflow() {
    const relativePath = await getWorkflowPath();
    const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
    try {
        return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
    }
    catch (e) {
        if (e instanceof Error && e["code"] === "ENOENT") {
            throw new Error(`Unable to load code scanning workflow from ${absolutePath}. This can happen if the currently ` +
                "running workflow checks out a branch that doesn't contain the corresponding workflow file.");
        }
        throw e;
async function getWorkflow(logger) {
    // In default setup, the currently executing workflow is not checked into the repository.
    // Instead, a gzipped then base64 encoded version of the workflow file is provided via the
    // `CODE_SCANNING_WORKFLOW_FILE` environment variable.
    const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"];
    if (maybeWorkflow) {
        logger.debug("Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable.");
        return yaml.load(zlib_1.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString());
    }
    const workflowPath = await getWorkflowAbsolutePath(logger);
    return yaml.load(fs.readFileSync(workflowPath, "utf-8"));
}
exports.getWorkflow = getWorkflow;
/**
 * Get the path of the currently executing workflow.
 * Get the absolute path of the currently executing workflow.
 */
async function getWorkflowPath() {
async function getWorkflowAbsolutePath(logger) {
    const relativePath = await getWorkflowRelativePath();
    const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
    if (fs.existsSync(absolutePath)) {
        logger.debug(`Derived the following absolute path for the currently executing workflow: ${absolutePath}.`);
        return absolutePath;
    }
    throw new Error(`Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. ` +
        "This can happen if the currently running workflow checks out a branch that doesn't contain " +
        "the corresponding workflow file.");
}
/**
 * Get the path of the currently executing workflow relative to the repository root.
 */
async function getWorkflowRelativePath() {
    const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
    const owner = repo_nwo[0];
    const repo = repo_nwo[1];
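In default setup, getWorkflow now short-circuits when CODE_SCANNING_WORKFLOW_FILE is set: the value is base64-decoded, gunzipped, and parsed as YAML. The round trip below is a standalone sketch; the encoding half is an assumption for illustration, while the decoding half mirrors the code above.

// Round-trip sketch for the CODE_SCANNING_WORKFLOW_FILE encoding described above.
const zlib = require("zlib");
const yaml = require("js-yaml");

// Encoding side (assumed): gzip the workflow YAML, then base64-encode it.
const workflowYaml = "name: CodeQL\non: [push]\n";
const encoded = zlib.gzipSync(Buffer.from(workflowYaml, "utf8")).toString("base64");

// Decoding side, as in getWorkflow above.
const decoded = yaml.load(zlib.gunzipSync(Buffer.from(encoded, "base64")).toString());
console.log(decoded.name); // "CodeQL"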
@@ -231,18 +247,37 @@ async function getWorkflowPath() {
    const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
    return workflowResponse.data.path;
}
exports.getWorkflowPath = getWorkflowPath;
exports.getWorkflowRelativePath = getWorkflowRelativePath;
/**
 * Get the workflow run ID.
 */
function getWorkflowRunID() {
    const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
    const workflowRunIdString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID");
    const workflowRunID = parseInt(workflowRunIdString, 10);
    if (Number.isNaN(workflowRunID)) {
        throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
        throw new Error(`GITHUB_RUN_ID must define a non NaN workflow run ID. Current value is ${workflowRunIdString}`);
    }
    if (workflowRunID < 0) {
        throw new Error(`GITHUB_RUN_ID must be a non-negative integer. Current value is ${workflowRunIdString}`);
    }
    return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
 * Get the workflow run attempt number.
 */
function getWorkflowRunAttempt() {
    const workflowRunAttemptString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ATTEMPT");
    const workflowRunAttempt = parseInt(workflowRunAttemptString, 10);
    if (Number.isNaN(workflowRunAttempt)) {
        throw new Error(`GITHUB_RUN_ATTEMPT must define a non NaN workflow run attempt. Current value is ${workflowRunAttemptString}`);
    }
    if (workflowRunAttempt <= 0) {
        throw new Error(`GITHUB_RUN_ATTEMPT must be a positive integer. Current value is ${workflowRunAttemptString}`);
    }
    return workflowRunAttempt;
}
exports.getWorkflowRunAttempt = getWorkflowRunAttempt;
function getStepsCallingAction(job, actionName) {
    if (job.uses) {
        throw new Error(`Could not get steps calling ${actionName} since the job calls a reusable workflow.`);
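getWorkflowRunID and getWorkflowRunAttempt both read a required environment variable, parse it with parseInt, and reject NaN or out-of-range values before the number reaches a status report or an upload payload. A small usage sketch under assumed environment values; the relative require path is an assumption.

// Usage sketch; the environment values here are illustrative assumptions.
process.env["GITHUB_RUN_ID"] = "3456789012";
process.env["GITHUB_RUN_ATTEMPT"] = "2";

const workflow = require("./workflow");
const runID = workflow.getWorkflowRunID();           // 3456789012
const runAttempt = workflow.getWorkflowRunAttempt(); // 2 (throws if NaN or < 1)
console.log(`run ${runID}, attempt ${runAttempt}`);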
File diff suppressed because one or more lines are too long