Merge branch 'main' into rneatherway/optional-merge

Robin Neatherway 2020-09-29 13:02:37 +01:00 committed by GitHub
commit 2d6f6077bf
566 changed files with 76813 additions and 151832 deletions


@ -6,7 +6,7 @@ function isInterpretedLanguage(language): boolean {
}
// Matches a string containing only characters that are legal to include in paths on windows.
export const legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
export const legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths: string[]): string {
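
A minimal sketch (not part of the diff) exercising the tightened regex; inside a character class the escaped \| and a plain | are equivalent, so behaviour is unchanged. The sample paths are invented.

// Sketch: the regex from the hunk above, rejecting the characters that are
// illegal in Windows paths (< > : " | ?).
const legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;

const samples = ["src/analyze.ts", "a|b.txt", "why?.ts", "plain_name.txt"];
for (const p of samples) {
  console.log(p, legalWindowsPathCharactersRegex.test(p));
}
// Expected output: true, false, false, true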

src/analyze.test.ts (new file, 76 lines)

@ -0,0 +1,76 @@
import test from "ava";
import * as fs from "fs";
import { runQueries } from "./analyze";
import { setCodeQL } from "./codeql";
import { Config } from "./config-utils";
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom.
test("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setCodeQL({
databaseAnalyze: async () => undefined,
});
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
for (const language of Object.values(Language)) {
const config: Config = {
languages: [language],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
};
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
recursive: true,
});
config.queries[language] = {
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await runQueries(
tmpDir,
memoryFlag,
addSnippetsFlag,
threadsFlag,
config,
getRunnerLogger(true)
);
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
t.true(
`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport
);
config.queries[language] = {
builtin: [],
custom: ["foo.ql"],
};
const customStatusReport = await runQueries(
tmpDir,
memoryFlag,
addSnippetsFlag,
threadsFlag,
config,
getRunnerLogger(true)
);
t.deepEqual(Object.keys(customStatusReport).length, 1);
t.true(
`analyze_custom_queries_${language}_duration_ms` in customStatusReport
);
}
});
});


@ -82,7 +82,7 @@ async function finalizeDatabaseCreation(
}
// Runs queries and creates sarif files in the given folder
async function runQueries(
export async function runQueries(
sarifFolder: string,
memoryFlag: string,
addSnippetsFlag: string,
@ -90,52 +90,70 @@ async function runQueries(
config: configUtils.Config,
logger: Logger
): Promise<QueriesStatusReport> {
const codeql = getCodeQL(config.codeQLCmd);
const statusReport: QueriesStatusReport = {};
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language] || [];
if (queries.length === 0) {
const queries = config.queries[language];
if (queries.builtin.length === 0 && queries.custom.length === 0) {
throw new Error(
`Unable to analyse ${language} as no queries were selected for this language`
);
}
try {
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = `${databasePath}-queries.qls`;
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
fs.writeFileSync(querySuite, querySuiteContents);
logger.debug(
`Query suite file for ${language}...\n${querySuiteContents}`
);
for (const type of ["builtin", "custom"]) {
if (queries[type].length > 0) {
const startTime = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
const databasePath = util.getCodeQLDatabasePath(
config.tempDir,
language
);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = `${databasePath}-queries-${type}.qls`;
const querySuiteContents = queries[type]
.map((q: string) => `- query: ${q}`)
.join("\n");
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(
`Query suite file for ${language}...\n${querySuiteContents}`
);
await codeql.databaseAnalyze(
databasePath,
sarifFile,
querySuite,
memoryFlag,
addSnippetsFlag,
threadsFlag
);
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
logger.debug(
`SARIF results for database ${language} created at "${sarifFile}"`
);
logger.endGroup();
const codeql = getCodeQL(config.codeQLCmd);
await codeql.databaseAnalyze(
databasePath,
sarifFile,
querySuitePath,
memoryFlag,
addSnippetsFlag,
threadsFlag
);
logger.debug(
`SARIF results for database ${language} created at "${sarifFile}"`
);
logger.endGroup();
// Record the performance
const endTime = new Date().getTime();
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
endTime - startTime;
}
}
} catch (e) {
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
logger.error(`Error running analysis for ${language}: ${e}`);
logger.info(e);
statusReport.analyze_failure_language = language;
return statusReport;
}
}
return {};
return statusReport;
}
export async function runAnalyze(
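
A hedged sketch of the per-type query-suite handling that runQueries now performs: each builtin/custom group gets its own .qls file on disk, which avoids command-line length limits. The helper name and sample paths are invented.

import * as fs from "fs";

// Illustrative helper (not in the diff): writes a query suite file for one
// group of queries and returns its path, mirroring the
// `${databasePath}-queries-${type}.qls` naming used above.
function buildQuerySuite(
  databasePath: string,
  type: "builtin" | "custom",
  queries: string[]
): string {
  const querySuitePath = `${databasePath}-queries-${type}.qls`;
  // One "- query: <path>" entry per line, the suite format codeql accepts.
  const contents = queries.map((q) => `- query: ${q}`).join("\n");
  fs.writeFileSync(querySuitePath, contents);
  return querySuitePath;
}

// Example usage with made-up paths:
const suite = buildQuerySuite("/tmp/db-javascript", "custom", [
  "/checkout/queries/foo.ql",
  "/checkout/queries/bar.ql",
]);
console.log(`wrote ${suite}`);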


@ -1,4 +1,5 @@
import * as github from "@actions/github";
import * as githubUtils from "@actions/github/lib/utils";
import * as retry from "@octokit/plugin-retry";
import consoleLogLevel from "console-log-level";
import * as path from "path";
@ -13,12 +14,14 @@ export const getApiClient = function (
if (isLocalRun() && !allowLocalRun) {
throw new Error("Invalid API call in local run");
}
return new github.GitHub({
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: consoleLogLevel({ level: "debug" }),
});
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
return new retryingOctokit(
githubUtils.getOctokitOptions(githubAuth, {
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: consoleLogLevel({ level: "debug" }),
})
);
};
function getApiUrl(githubUrl: string): string {
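
A sketch of the new client construction: the @octokit/plugin-retry plugin is mixed into the Actions Octokit class, so transient failures are retried by Octokit itself (which is what allows the hand-rolled retry loop in upload-lib.ts, further down, to be deleted). The token and URL below are placeholders.

import * as githubUtils from "@actions/github/lib/utils";
import * as retry from "@octokit/plugin-retry";
import consoleLogLevel from "console-log-level";

// Placeholder inputs for illustration only.
const githubAuth = "ghp_example_token";
const apiUrl = "https://api.github.com";

// Mix the retry plugin into the GitHub Octokit class, then instantiate it
// with the same options the action passes in api.ts.
const RetryingOctokit = githubUtils.GitHub.plugin(retry.retry);
const client = new RetryingOctokit(
  githubUtils.getOctokitOptions(githubAuth, {
    baseUrl: apiUrl,
    userAgent: "CodeQL Action",
    log: consoleLogLevel({ level: "debug" }),
  })
);

// Requests made through client.request(...) now retry transparently.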


@ -7,6 +7,7 @@ import * as codeql from "./codeql";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
import * as defaults from "./defaults.json";
setupTests(test);
@ -43,6 +44,177 @@ test("download codeql bundle cache", async (t) => {
});
});
test("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`)
);
await codeql.setupCodeQL(
"https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
nock("https://example.com")
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)
);
await codeql.setupCodeQL(
"https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
});
});
test("don't download codeql bundle cache with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`)
);
await codeql.setupCodeQL(
"https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await codeql.setupCodeQL(
undefined,
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
test("download codeql bundle cache with different version cached (not pinned)", async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)
);
await codeql.setupCodeQL(
"https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
nock("https://github.com")
.get(
`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle.tar.gz`
)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)
);
await codeql.setupCodeQL(
undefined,
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
await util.withTmpDir(async (tmpDir) => {
nock("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`)
);
await codeql.setupCodeQL(
"https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
nock("https://github.com")
.get(
`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle.tar.gz`
)
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)
);
await codeql.setupCodeQL(
"latest",
"token",
"https://github.com",
tmpDir,
tmpDir,
"runner",
getRunnerLogger(true)
);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test("parse codeql bundle url version", (t) => {
const tests = {
"20200601": "0.0.0-20200601",


@ -235,12 +235,37 @@ export async function setupCodeQL(
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
try {
// We use the special value of 'latest' to prioritize the version in the
// defaults over any pinned cached version.
const forceLatest = codeqlURL === "latest";
if (forceLatest) {
codeqlURL = undefined;
}
const codeqlURLVersion = getCodeQLURLVersion(
codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`,
logger
);
// If we find the specified version, we always use that.
let codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
// If we don't find the requested version, in some cases we may allow a
// different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL");
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(
`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`
);
codeqlFolder = tmpCodeqlFolder;
}
}
}
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
} else {
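
A hedged sketch condensing the cache-selection rules the hunk above introduces: an explicitly requested version always wins, 'latest' bypasses the pinned-version shortcut, and a single cached pinned bundle may stand in for the default. The helper and its parameters are invented for illustration.

// Illustrative decision helper (not in the diff). Returns which bundle version
// setup should use, given what the caller asked for and what is cached.
function chooseBundleVersion(
  requested: string | undefined, // URL-derived version, "latest", or undefined
  defaultVersion: string, // CODEQL_BUNDLE_VERSION from defaults
  cachedVersions: string[], // toolcache.findAllVersions("CodeQL")
  cachedIsPinned: boolean // true if the single cached bundle has a pinned-version marker
): string {
  const forceLatest = requested === "latest";
  const explicit = !forceLatest && requested !== undefined ? requested : undefined;

  // An explicitly requested version is always honored.
  if (explicit !== undefined) {
    return explicit;
  }
  // With no explicit request and no 'latest' override, a single pinned cached
  // bundle takes precedence over the default, saving a download.
  if (!forceLatest && cachedVersions.length === 1 && cachedIsPinned) {
    return cachedVersions[0];
  }
  // Otherwise fall back to the default bundle version.
  return defaultVersion;
}

// Example: a pinned 0.0.0-20200601 bundle is reused unless "latest" is passed.
console.log(chooseBundleVersion(undefined, "0.0.0-20200610", ["0.0.0-20200601"], true));
console.log(chooseBundleVersion("latest", "0.0.0-20200610", ["0.0.0-20200601"], true));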
@ -595,8 +620,10 @@ function getExtraOptionsFromEnv(path: string[]) {
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*
* Exported for testing.
*/
export /* exported for testing */ function getExtraOptions(
export function getExtraOptions(
options: any,
path: string[],
pathInfo: string[]


@ -27,12 +27,12 @@ function mockGetContents(
content: GetContentsResponse
): sinon.SinonStub<any, any> {
// Passing an auth token is required, so we just use a dummy value
const client = new github.GitHub("123");
const client = github.getOctokit("123");
const response = {
data: content,
};
const spyGetContents = sinon
.stub(client.repos, "getContents")
.stub(client.repos, "getContent")
.resolves(response as any);
sinon.stub(api, "getApiClient").value(() => client);
return spyGetContents;
@ -40,7 +40,7 @@ function mockGetContents(
function mockListLanguages(languages: string[]) {
// Passing an auth token is required, so we just use a dummy value
const client = new github.GitHub("123");
const client = github.getOctokit("123");
const response = {
data: {},
};
@ -272,7 +272,12 @@ test("load non-empty input", async (t) => {
// And the config we expect it to parse to
const expectedConfig: configUtils.Config = {
languages: [Language.javascript],
queries: { javascript: ["/foo/a.ql", "/bar/b.ql"] },
queries: {
javascript: {
builtin: [],
custom: ["/foo/a.ql", "/bar/b.ql"],
},
},
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: {
@ -381,9 +386,9 @@ test("Default queries are used", async (t) => {
*/
function queriesToResolvedQueryForm(queries: string[]) {
const dummyResolvedQueries = {};
queries.forEach((q) => {
for (const q of queries) {
dummyResolvedQueries[q] = {};
});
}
return {
byLanguage: {
javascript: dummyResolvedQueries,
@ -442,9 +447,13 @@ test("Queries can be specified in config file", async (t) => {
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(
config.queries["javascript"].builtin[0],
/javascript-code-scanning.qls$/
);
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
});
});
@ -501,9 +510,13 @@ test("Queries from config file can be overridden in workflow file", async (t) =>
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override$/);
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(
config.queries["javascript"].builtin[0],
/javascript-code-scanning.qls$/
);
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
});
});
@ -558,8 +571,9 @@ test("Queries in workflow file can be used in tandem with the 'disable default q
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].length, 1);
t.regex(config.queries["javascript"][0], /.*\/workflow-query$/);
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
});
});
@ -610,10 +624,14 @@ test("Multiple queries can be specified in workflow file, no config file require
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].length, 3);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override1$/);
t.regex(config.queries["javascript"][2], /.*\/override2$/);
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.regex(
config.queries["javascript"].builtin[0],
/javascript-code-scanning.qls$/
);
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
});
});
@ -678,11 +696,15 @@ test("Queries in workflow file can be added to the set of queries without overri
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].length, 4);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/additional1$/);
t.regex(config.queries["javascript"][2], /.*\/additional2$/);
t.regex(config.queries["javascript"][3], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.regex(
config.queries["javascript"].builtin[0],
/javascript-code-scanning.qls$/
);
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
});
});


@ -31,6 +31,24 @@ export interface UserConfig {
paths?: string[];
}
/**
* Lists of query files for each language.
* Will only contain .ql files and not other kinds of files,
* and all file paths will be absolute.
*
* The queries are split between ones from a builtin suite
* and custom queries from unknown locations. This allows us to treat
* them separately if we want to, for example to measure performance.
*/
type Queries = {
[language: string]: {
/** Queries from one of the builtin suites */
builtin: string[];
/** Custom queries, from a non-standard location */
custom: string[];
};
};
/**
* Format of the parsed config file.
*/
@ -41,10 +59,8 @@ export interface Config {
languages: Language[];
/**
* Map from language to query files.
* Will only contain .ql files and not other kinds of files,
* and all file paths will be absolute.
*/
queries: { [language: string]: string[] };
queries: Queries;
/**
* List of paths to ignore from analysis.
*/
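
A small sketch of consuming the new per-language builtin/custom split from outside config-utils; the Queries value and the reporting loop are illustrative only.

// The shape config.queries now has (copied from the type above).
type Queries = {
  [language: string]: {
    /** Queries from one of the builtin suites */
    builtin: string[];
    /** Custom queries, from a non-standard location */
    custom: string[];
  };
};

// Hypothetical parsed value for a single-language configuration.
const queries: Queries = {
  javascript: {
    builtin: ["/opt/codeql/qlpacks/javascript-code-scanning.qls"],
    custom: ["/home/runner/work/repo/queries/foo.ql"],
  },
};

// Keeping the groups separate lets callers report on them individually,
// which is how runQueries produces the analyze_builtin_/analyze_custom_
// duration fields checked in analyze.test.ts.
for (const [language, q] of Object.entries(queries)) {
  console.log(`${language}: ${q.builtin.length} builtin, ${q.custom.length} custom`);
}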
@ -131,32 +147,44 @@ function validateQueries(resolvedQueries: ResolveQueriesOutput) {
/**
* Run 'codeql resolve queries' and add the results to resultMap
*
* If a checkout path is given then the queries are assumed to be custom queries
* and an error will be thrown if there is anything invalid about the queries.
* If a checkout path is not given then the queries are assumed to be builtin
* queries, and error checking will be suppressed.
*/
async function runResolveQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
resultMap: Queries,
toResolve: string[],
extraSearchPath: string | undefined,
errorOnInvalidQueries: boolean
extraSearchPath: string | undefined
) {
const resolvedQueries = await codeQL.resolveQueries(
toResolve,
extraSearchPath
);
for (const [language, queries] of Object.entries(
if (extraSearchPath !== undefined) {
validateQueries(resolvedQueries);
}
for (const [language, queryPaths] of Object.entries(
resolvedQueries.byLanguage
)) {
if (resultMap[language] === undefined) {
resultMap[language] = [];
resultMap[language] = {
builtin: [],
custom: [],
};
}
resultMap[language].push(
...Object.keys(queries).filter((q) => !queryIsDisabled(language, q))
const queries = Object.keys(queryPaths).filter(
(q) => !queryIsDisabled(language, q)
);
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
if (extraSearchPath !== undefined) {
resultMap[language].custom.push(...queries);
} else {
resultMap[language].builtin.push(...queries);
}
}
}
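
A hedged sketch of the routing convention described in the comment above: with a search path the resolved queries are treated as custom, without one they are treated as builtin. The accumulate helper and sample inputs are invented; validation and disabled-query filtering are omitted.

type Queries = { [language: string]: { builtin: string[]; custom: string[] } };

// Stand-in for the parsed output of 'codeql resolve queries'.
type ResolvedByLanguage = { [language: string]: { [queryPath: string]: {} } };

// Simplified version of the accumulation loop above.
function accumulate(
  resultMap: Queries,
  byLanguage: ResolvedByLanguage,
  extraSearchPath: string | undefined
) {
  for (const [language, queryPaths] of Object.entries(byLanguage)) {
    if (resultMap[language] === undefined) {
      resultMap[language] = { builtin: [], custom: [] };
    }
    // A search path means the caller passed custom queries; no search path
    // means a builtin suite was resolved.
    const bucket = extraSearchPath !== undefined ? "custom" : "builtin";
    resultMap[language][bucket].push(...Object.keys(queryPaths));
  }
}

// Usage with made-up data: builtin suites resolve without a search path,
// checked-out custom queries resolve with one.
const resultMap: Queries = {};
accumulate(resultMap, { javascript: { "/suites/javascript-code-scanning.qls": {} } }, undefined);
accumulate(resultMap, { javascript: { "/checkout/queries/foo.ql": {} } }, "/checkout");
console.log(resultMap.javascript);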
@ -166,10 +194,10 @@ async function runResolveQueries(
async function addDefaultQueries(
codeQL: CodeQL,
languages: string[],
resultMap: { [language: string]: string[] }
resultMap: Queries
) {
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
// The set of acceptable values for built-in suites from the codeql bundle
@ -182,7 +210,7 @@ const builtinSuites = ["security-extended", "security-and-quality"] as const;
async function addBuiltinSuiteQueries(
languages: string[],
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
resultMap: Queries,
suiteName: string,
configFile?: string
) {
@ -192,7 +220,7 @@ async function addBuiltinSuiteQueries(
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
/**
@ -200,7 +228,7 @@ async function addBuiltinSuiteQueries(
*/
async function addLocalQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
resultMap: Queries,
localQueryPath: string,
checkoutPath: string,
configFile?: string
@ -228,13 +256,7 @@ async function addLocalQueries(
);
}
await runResolveQueries(
codeQL,
resultMap,
[absoluteQueryPath],
checkoutPath,
true
);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
}
/**
@ -242,7 +264,7 @@ async function addLocalQueries(
*/
async function addRemoteQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
resultMap: Queries,
queryUses: string,
tempDir: string,
githubUrl: string,
@ -283,7 +305,7 @@ async function addRemoteQueries(
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath);
}
/**
@ -297,7 +319,7 @@ async function addRemoteQueries(
async function parseQueryUses(
languages: string[],
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
resultMap: Queries,
queryUses: string,
tempDir: string,
checkoutPath: string,
@ -353,7 +375,7 @@ const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
// Characters that are supported by filters in workflows, but not by us.
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
const filterPatternCharactersRegex = /.*[?+[\]!].*/;
// Checks that a paths or paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
@ -660,7 +682,7 @@ async function addQueriesFromWorkflow(
codeQL: CodeQL,
queriesInput: string,
languages: string[],
resultMap: { [language: string]: string[] },
resultMap: Queries,
tempDir: string,
checkoutPath: string,
githubUrl: string,
@ -718,7 +740,7 @@ export async function getDefaultConfig(
githubUrl,
logger
);
const queries = {};
const queries: Queries = {};
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(
@ -790,7 +812,7 @@ async function loadConfig(
logger
);
const queries = {};
const queries: Queries = {};
const pathsIgnore: string[] = [];
const paths: string[] = [];
@ -853,34 +875,38 @@ async function loadConfig(
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
parsedYAML[PATHS_IGNORE_PROPERTY]!.forEach((path) => {
for (const path of parsedYAML[PATHS_IGNORE_PROPERTY]!) {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(
validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger)
);
});
}
}
if (PATHS_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
parsedYAML[PATHS_PROPERTY]!.forEach((path) => {
for (const path of parsedYAML[PATHS_PROPERTY]!) {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsInvalid(configFile));
}
paths.push(
validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger)
);
});
}
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined || queries[language].length === 0) {
if (
queries[language] === undefined ||
(queries[language].builtin.length === 0 &&
queries[language].custom.length === 0)
) {
throw new Error(
`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run."
@ -997,7 +1023,7 @@ async function getRemoteConfig(
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.getContents({
.repos.getContent({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,


@ -276,14 +276,16 @@ export function addFingerprints(
}
// Now hash each file that was found
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber: number, hash: string) {
Object.values(callbacks).forEach((c) => c(lineNumber, hash));
for (const c of Object.values(callbacks)) {
c(lineNumber, hash);
}
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);
});
}
return JSON.stringify(sarif);
}
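
A hedged sketch of the tee-callback pattern used by the rewritten loop: one callback per file fans each computed line hash out to every registered consumer, so each file is read and hashed once. The stub hash function and sample data are invented.

// Sketch of the fan-out used in addFingerprints (real hashing replaced by a stub).
type HashCallback = (lineNumber: number, hash: string) => void;

function hashLines(callback: HashCallback, fileContents: string) {
  // Stand-in for the real fingerprint hash: line length rendered as hex.
  fileContents.split("\n").forEach((line, i) => {
    callback(i + 1, line.length.toString(16));
  });
}

// Multiple SARIF results may point at the same file; each registers a callback.
const callbacksByFile: { [filepath: string]: HashCallback[] } = {
  "src/example.ts": [
    (line, hash) => console.log(`result A: line ${line} -> ${hash}`),
    (line, hash) => console.log(`result B: line ${line} -> ${hash}`),
  ],
};

for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
  // The tee callback forwards each (line, hash) pair to every consumer.
  const teeCallback: HashCallback = (lineNumber, hash) => {
    for (const c of callbacks) {
      c(lineNumber, hash);
    }
  };
  const fileContents = `stub contents for ${filepath}`; // fs read elided in the sketch
  hashLines(teeCallback, fileContents);
}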


@ -19,7 +19,7 @@ interface InitSuccessStatusReport extends actionsUtil.StatusReportBase {
paths_ignore: string;
// Comma-separated list of languages where the default queries are disabled
disable_default_queries: string;
// Comma-separated list of queries sources, from the 'queries' config field
// Comma-separated list of queries sources, from the 'queries' config field or workflow input
queries: string;
}
@ -44,9 +44,20 @@ async function sendSuccessStatusReport(
]
? languages
: "";
const queries = (config.originalUserInput.queries || [])
.map((q) => q.uses)
.join(",");
const queries: string[] = [];
let queriesInput = actionsUtil.getOptionalInput("queries")?.trim();
if (queriesInput === undefined || queriesInput.startsWith("+")) {
queries.push(
...(config.originalUserInput.queries || []).map((q) => q.uses)
);
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const statusReport: InitSuccessStatusReport = {
...statusReportBase,
@ -55,7 +66,7 @@ async function sendSuccessStatusReport(
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries,
queries: queries.join(","),
};
await actionsUtil.sendStatusReport(statusReport);
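
A sketch of the rule the new status-report code follows for the workflow 'queries' input: a leading "+" appends the workflow queries to the config file's queries, while no "+" makes the workflow input replace them. The helper and sample values are invented.

// Illustrative helper (not in the diff): combines the workflow 'queries' input
// with the config file's queries, following the "+" convention used above.
function combineQueries(
  queriesInput: string | undefined,
  configQueries: string[]
): string[] {
  const queries: string[] = [];
  // No input, or an input starting with "+", keeps the config file's queries.
  if (queriesInput === undefined || queriesInput.startsWith("+")) {
    queries.push(...configQueries);
  }
  if (queriesInput !== undefined) {
    const trimmed = queriesInput.startsWith("+")
      ? queriesInput.substr(1)
      : queriesInput;
    queries.push(...trimmed.split(","));
  }
  return queries;
}

// Replacement vs. addition, with made-up values:
console.log(combineQueries("security-extended", ["./local-pack"]));
// -> ["security-extended"]
console.log(combineQueries("+security-extended", ["./local-pack"]));
// -> ["./local-pack", "security-extended"]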
@ -130,9 +141,9 @@ async function run() {
const tracerConfig = await runInit(codeql, config);
if (tracerConfig !== undefined) {
Object.entries(tracerConfig.env).forEach(([key, value]) =>
core.exportVariable(key, value)
);
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
}
if (process.platform === "win32") {
await injectWindowsTracer(


@ -61,7 +61,9 @@ function importTracerEnvironment(config: Config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
Object.keys(env).forEach((key) => (process.env[key] = env[key]));
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
}
}

src/testdata/codeql-bundle-pinned.tar.gz (new vendored binary file, contents not shown)


@ -77,7 +77,7 @@ export async function toolrunnerErrorCatcher(
return returnState;
} else {
throw new Error(
`The process \'${commandLine}\' failed with exit code ${returnState}`
`The process '${commandLine}' failed with exit code ${returnState}`
);
}
} else {


@ -13,13 +13,9 @@ export type TracerConfig = {
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
,
"SEMMLE_RUNNER",
,
"SEMMLE_COPY_EXECUTABLES_ROOT",
,
"SEMMLE_DEPTRACE_SOCKET",
,
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
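
A brief note on the stray commas removed above: an elision in an array literal is iterated as undefined, so the old literal put undefined entries into the Set. The snippet below is illustrative only.

// Elisions ("holes") in an array literal become undefined entries in a Set.
const sloppy = new Set(["SEMMLE_PRELOAD_libtrace", , "SEMMLE_RUNNER"]);
const clean = new Set(["SEMMLE_PRELOAD_libtrace", "SEMMLE_RUNNER"]);

console.log(sloppy.size); // 3, includes undefined
console.log(sloppy.has(undefined)); // true
console.log(clean.size); // 2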


@ -26,7 +26,9 @@ export function combineSarifFiles(sarifFiles: string[]): string {
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
throw new Error(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
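
A short illustration (not from the diff) of why the bare thrown string was replaced with an Error: a string carries no stack trace and fails instanceof Error checks, which matters to code that inspects or logs failures. The example values are made up.

function demo(throwError: boolean) {
  try {
    if (throwError) {
      throw new Error("Different SARIF versions encountered: 2.1.0 and 2.0.0");
    }
    // Throwing a plain string: no stack, no .message, not an Error instance.
    throw "Different SARIF versions encountered: 2.1.0 and 2.0.0";
  } catch (e) {
    console.log(e instanceof Error, e instanceof Error ? e.stack !== undefined : false);
  }
}

demo(false); // false false
demo(true);  // true true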
@ -53,72 +55,20 @@ async function uploadPayload(
return;
}
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to backoff too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
const client = api.getApiClient(githubAuth, githubUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const reqURL =
mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
const reqURL =
mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`response status: ${response.status}`);
const statusCode = response.status;
if (statusCode === 202) {
logger.info("Successfully uploaded results");
return;
}
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error(
`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(
response.data
)}`
);
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
logger.warning(
`Upload attempt (${attempt + 1} of ${
backoffPeriods.length + 1
}) failed (${requestID}). Retrying in ${
backoffPeriods[attempt]
} seconds: (${statusCode}) ${JSON.stringify(response.data)}`
);
// Sleep for the backoff period
await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
} else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error(
`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(
response.data
)}`
);
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error("Upload failed");
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
}
export interface UploadStatusReport {
@ -153,10 +103,13 @@ export async function upload(
throw new Error(`Path does not exist: ${sarifPath}`);
}
if (fs.lstatSync(sarifPath).isDirectory()) {
fs.readdirSync(sarifPath)
const paths = fs
.readdirSync(sarifPath)
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f))
.forEach((f) => sarifFiles.push(f));
.map((f) => path.resolve(sarifPath, f));
for (const path of paths) {
sarifFiles.push(path);
}
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}