Initial commit (from f5274cbdce4ae7c9e4b937dcdf95ac70ae436d5f)
This commit is contained in:
commit
28ccc3db2d
13974 changed files with 2618436 additions and 0 deletions
18
src/analysis-paths.test.ts
Normal file
18
src/analysis-paths.test.ts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
import * as analysisPaths from './analysis-paths';
|
||||
import * as configUtils from './config-utils';
|
||||
|
||||
test("emptyPaths", async () => {
|
||||
let config = new configUtils.Config();
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||
expect(process.env['LGTM_INDEX_INCLUDE']).toBeUndefined();
|
||||
expect(process.env['LGTM_INDEX_EXCLUDE']).toBeUndefined();
|
||||
});
|
||||
|
||||
test("nonEmptyPaths", async () => {
|
||||
let config = new configUtils.Config();
|
||||
config.paths.push('path1', 'path2');
|
||||
config.pathsIgnore.push('path3', 'path4');
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||
expect(process.env['LGTM_INDEX_INCLUDE']).toEqual('path1\npath2');
|
||||
expect(process.env['LGTM_INDEX_EXCLUDE']).toEqual('path3\npath4');
|
||||
});
|
||||
23
src/analysis-paths.ts
Normal file
23
src/analysis-paths.ts
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
import * as core from '@actions/core';
|
||||
|
||||
import * as configUtils from './config-utils';
|
||||
|
||||
export function includeAndExcludeAnalysisPaths(config: configUtils.Config, languages: string[]) {
|
||||
if (config.paths.length !== 0) {
|
||||
core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
|
||||
}
|
||||
|
||||
if (config.pathsIgnore.length !== 0) {
|
||||
core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
|
||||
}
|
||||
|
||||
function isInterpretedLanguage(language): boolean {
|
||||
return language === 'javascript' && language === 'python';
|
||||
}
|
||||
|
||||
// Index include/exclude only work in javascript and python
|
||||
// If some other language is detected/configured show a warning
|
||||
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
|
||||
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
||||
}
|
||||
}
|
||||
58
src/autobuild.ts
Normal file
58
src/autobuild.ts
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as util from './util';
|
||||
|
||||
// Entry point of the autobuild action: attempt to automatically build the
// dominant traced (compiled) language using the CodeQL autobuild scripts,
// reporting start/success/failure to the status service as it goes.
async function run() {
  try {
    // Bail out early if the action should be aborted or the "starting" report
    // was rejected by the service.
    if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
      return;
    }

    // Attempt to find a language to autobuild.
    // We want to pick the dominant language in the repo from the ones we're able to build.
    // The languages are sorted in order specified by user or by lines of code if we got
    // them from the GitHub API, so try to build the first language on the list.
    const language = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]?.split(',')[0];

    if (!language) {
      // No traced languages configured — nothing needs an extra build step.
      core.info("None of the languages in this project require extra build steps");
      return;
    }

    core.debug(`Detected dominant traced language: ${language}`);

    core.startGroup(`Attempting to automatically build ${language} code`);
    // TODO: share config across actions better via env variables
    const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);

    // The per-language autobuild script lives next to the CodeQL CLI binary,
    // under <cli dir>/<language>/tools/autobuild.{sh,cmd}.
    const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
    const autobuildCmd = path.join(path.dirname(codeqlCmd), language, 'tools', cmdName);

    // Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
    // This is because of an issue with Azure pipelines timing out connections after 4 minutes
    // and Maven not properly handling closed connections
    // Otherwise long build processes will timeout when pulling down Java packages
    // https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
    let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
    process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');

    await exec.exec(autobuildCmd);
    core.endGroup();

  } catch (error) {
    // Fail the workflow step and report the failure (with stack) to the service.
    core.setFailed(error.message);
    await util.reportActionFailed('autobuild', error.message, error.stack);
    return;
  }

  await util.reportActionSucceeded('autobuild');
}

// Top-level safety net: any rejection not handled inside run() still fails the action.
run().catch(e => {
  core.setFailed("autobuild action failed: " + e);
  console.log(e);
});
|
||||
140
src/config-utils.ts
Normal file
140
src/config-utils.ts
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as io from '@actions/io';
|
||||
import * as fs from 'fs';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
|
||||
export class ExternalQuery {
|
||||
public repository: string;
|
||||
public ref: string;
|
||||
public path = '';
|
||||
|
||||
constructor(repository: string, ref: string) {
|
||||
this.repository = repository;
|
||||
this.ref = ref;
|
||||
}
|
||||
}
|
||||
|
||||
export class Config {
|
||||
public name = "";
|
||||
public additionalQueries: string[] = [];
|
||||
public externalQueries: ExternalQuery[] = [];
|
||||
public pathsIgnore: string[] = [];
|
||||
public paths: string[] = [];
|
||||
|
||||
public addQuery(queryUses: string) {
|
||||
// The logic for parsing the string is based on what actions does for
|
||||
// parsing the 'uses' actions in the workflow file
|
||||
|
||||
if (queryUses === "") {
|
||||
throw '"uses" value for queries cannot be blank';
|
||||
}
|
||||
|
||||
if (queryUses.startsWith("./")) {
|
||||
this.additionalQueries.push(queryUses.slice(2));
|
||||
return;
|
||||
}
|
||||
|
||||
let tok = queryUses.split('@');
|
||||
if (tok.length !== 2) {
|
||||
throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
|
||||
}
|
||||
|
||||
const ref = tok[1];
|
||||
tok = tok[0].split('/');
|
||||
// The first token is the owner
|
||||
// The second token is the repo
|
||||
// The rest is a path, if there is more than one token combine them to form the full path
|
||||
if (tok.length > 3) {
|
||||
tok = [tok[0], tok[1], tok.slice(2).join('/')];
|
||||
}
|
||||
|
||||
if (tok.length < 2) {
|
||||
throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
|
||||
}
|
||||
|
||||
let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
|
||||
if (tok.length === 3) {
|
||||
external.path = tok[2];
|
||||
}
|
||||
this.externalQueries.push(external);
|
||||
}
|
||||
}
|
||||
|
||||
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
|
||||
|
||||
function initConfig(): Config {
|
||||
const configFile = core.getInput('config-file');
|
||||
|
||||
const config = new Config();
|
||||
|
||||
// If no config file was provided create an empty one
|
||||
if (configFile === '') {
|
||||
core.debug('No configuration file was provided');
|
||||
return config;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
|
||||
|
||||
if (parsedYAML.name && typeof parsedYAML.name === "string") {
|
||||
config.name = parsedYAML.name;
|
||||
}
|
||||
|
||||
const queries = parsedYAML.queries;
|
||||
if (queries && queries instanceof Array) {
|
||||
queries.forEach(query => {
|
||||
if (query.uses && typeof query.uses === "string") {
|
||||
config.addQuery(query.uses);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const pathsIgnore = parsedYAML['paths-ignore'];
|
||||
if (pathsIgnore && queries instanceof Array) {
|
||||
pathsIgnore.forEach(path => {
|
||||
if (typeof path === "string") {
|
||||
config.pathsIgnore.push(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const paths = parsedYAML.paths;
|
||||
if (paths && paths instanceof Array) {
|
||||
paths.forEach(path => {
|
||||
if (typeof path === "string") {
|
||||
config.paths.push(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
core.setFailed(err);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
async function saveConfig(config: Config) {
|
||||
const configString = JSON.stringify(config);
|
||||
await io.mkdirP(configFolder);
|
||||
fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
|
||||
core.debug('Saved config:');
|
||||
core.debug(configString);
|
||||
}
|
||||
|
||||
export async function loadConfig(): Promise<Config> {
|
||||
const configFile = path.join(configFolder, 'config');
|
||||
if (fs.existsSync(configFile)) {
|
||||
const configString = fs.readFileSync(configFile, 'utf8');
|
||||
core.debug('Loaded config:');
|
||||
core.debug(configString);
|
||||
return JSON.parse(configString);
|
||||
|
||||
} else {
|
||||
const config = initConfig();
|
||||
core.debug('Initialized config:');
|
||||
core.debug(JSON.stringify(config));
|
||||
await saveConfig(config);
|
||||
return config;
|
||||
}
|
||||
}
|
||||
17
src/external-queries.test.ts
Normal file
17
src/external-queries.test.ts
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as externalQueries from "./external-queries";
|
||||
|
||||
test("checkoutExternalQueries", async () => {
|
||||
let config = new configUtils.Config();
|
||||
config.externalQueries = [
|
||||
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
|
||||
];
|
||||
await externalQueries.checkoutExternalQueries(config);
|
||||
|
||||
let destination = process.env["RUNNER_WORKSPACE"] || "/tmp/codeql-action/";
|
||||
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
|
||||
expect(fs.existsSync(path.join(destination, "github", "codeql-go", "COPYRIGHT"))).toBeTruthy();
|
||||
});
|
||||
27
src/external-queries.ts
Normal file
27
src/external-queries.ts
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as configUtils from './config-utils';
|
||||
|
||||
export async function checkoutExternalQueries(config: configUtils.Config) {
|
||||
const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
|
||||
|
||||
for (const externalQuery of config.externalQueries) {
|
||||
core.info('Checking out ' + externalQuery.repository);
|
||||
|
||||
const checkoutLocation = path.join(folder, externalQuery.repository);
|
||||
if (!fs.existsSync(checkoutLocation)) {
|
||||
const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
|
||||
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
|
||||
await exec.exec('git', [
|
||||
'--work-tree=' + checkoutLocation,
|
||||
'--git-dir=' + checkoutLocation + '/.git',
|
||||
'checkout', externalQuery.ref,
|
||||
]);
|
||||
}
|
||||
|
||||
config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
|
||||
}
|
||||
}
|
||||
164
src/finalize-db.ts
Normal file
164
src/finalize-db.ts
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as io from '@actions/io';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as configUtils from './config-utils';
|
||||
import * as externalQueries from "./external-queries";
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as upload_lib from './upload-lib';
|
||||
import * as util from './util';
|
||||
|
||||
// For each scanned (non-traced) language, locate its extractor via
// `codeql resolve extractor` and run the extractor's autobuild script
// through `codeql database trace-command` to populate the database.
async function createdDBForScannedLanguages(codeqlCmd: string, databaseFolder: string) {
  const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
  if (scannedLanguages) {
    for (const language of scannedLanguages.split(',')) {
      core.startGroup('Extracting ' + language);

      // Get extractor location by capturing the CLI's stdout.
      let extractorPath = '';
      await exec.exec(codeqlCmd, ['resolve', 'extractor', '--format=json', '--language=' + language], {
        silent: true,
        listeners: {
          stdout: (data) => { extractorPath += data.toString(); },
          stderr: (data) => { process.stderr.write(data); }
        }
      });

      // Set trace command.
      // NOTE(review): with --format=json the captured output is assumed to be
      // a single JSON string holding the extractor's root path — confirm
      // against the CodeQL CLI's output format.
      const ext = process.platform === 'win32' ? '.cmd' : '.sh';
      const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);

      // Run trace command to perform the extraction into the language's database.
      await exec.exec(
        codeqlCmd,
        ['database', 'trace-command', path.join(databaseFolder, language), '--', traceCommand]);

      core.endGroup();
    }
  }
}
|
||||
|
||||
async function finalizeDatabaseCreation(codeqlCmd: string, databaseFolder: string) {
|
||||
await createdDBForScannedLanguages(codeqlCmd, databaseFolder);
|
||||
|
||||
const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
|
||||
for (const language of languages.split(',')) {
|
||||
core.startGroup('Finalizing ' + language);
|
||||
await exec.exec(codeqlCmd, ['database', 'finalize', path.join(databaseFolder, language)]);
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveQueryLanguages(codeqlCmd: string, config: configUtils.Config): Promise<Map<string, string[]>> {
|
||||
let res = new Map();
|
||||
|
||||
if (config.additionalQueries.length !== 0) {
|
||||
let resolveQueriesOutput = '';
|
||||
const options = {
|
||||
listeners: {
|
||||
stdout: (data: Buffer) => {
|
||||
resolveQueriesOutput += data.toString();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
await exec.exec(
|
||||
codeqlCmd, [
|
||||
'resolve',
|
||||
'queries',
|
||||
...config.additionalQueries,
|
||||
'--format=bylanguage'
|
||||
],
|
||||
options);
|
||||
|
||||
const resolveQueriesOutputObject = JSON.parse(resolveQueriesOutput);
|
||||
|
||||
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
|
||||
res[language] = Object.keys(<any>queries);
|
||||
}
|
||||
|
||||
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
|
||||
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
|
||||
if (noDeclaredLanguageQueries.length !== 0) {
|
||||
core.warning('Some queries do not declare a language:\n' + noDeclaredLanguageQueries.join('\n'));
|
||||
}
|
||||
|
||||
const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
|
||||
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
|
||||
if (multipleDeclaredLanguagesQueries.length !== 0) {
|
||||
core.warning('Some queries declare multiple languages:\n' + multipleDeclaredLanguagesQueries.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(codeqlCmd: string, databaseFolder: string, sarifFolder: string, config: configUtils.Config) {
|
||||
const queriesPerLanguage = await resolveQueryLanguages(codeqlCmd, config);
|
||||
|
||||
for (let database of fs.readdirSync(databaseFolder)) {
|
||||
core.startGroup('Analyzing ' + database);
|
||||
|
||||
const additionalQueries = queriesPerLanguage[database] || [];
|
||||
const sarifFile = path.join(sarifFolder, database + '.sarif');
|
||||
|
||||
await exec.exec(codeqlCmd, [
|
||||
'database',
|
||||
'analyze',
|
||||
path.join(databaseFolder, database),
|
||||
'--format=sarif-latest',
|
||||
'--output=' + sarifFile,
|
||||
'--no-sarif-add-snippets',
|
||||
database + '-code-scanning.qls',
|
||||
...additionalQueries,
|
||||
]);
|
||||
|
||||
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
|
||||
// Entry point of the finish/analyze action: finalize the CodeQL databases,
// run the queries, and optionally upload the resulting SARIF files.
async function run() {
  try {
    // Bail out early if the action should be aborted or the "starting" report
    // was rejected by the service.
    if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
      return;
    }
    const config = await configUtils.loadConfig();

    // Unset the tracer configuration so build tracing stops interfering from
    // this point on (cleared both in the exported env and this process).
    core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
    delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];

    const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
    const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);

    const sarifFolder = core.getInput('output');
    await io.mkdirP(sarifFolder);

    core.info('Finalizing database creation');
    await finalizeDatabaseCreation(codeqlCmd, databaseFolder);

    // External query repositories must be present before analysis can run.
    await externalQueries.checkoutExternalQueries(config);

    core.info('Analyzing database');
    await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);

    // Upload is opt-in via the 'upload' action input.
    if ('true' === core.getInput('upload')) {
      await upload_lib.upload(sarifFolder);
    }

  } catch (error) {
    // Fail the workflow step and report the failure (with stack) to the service.
    core.setFailed(error.message);
    await util.reportActionFailed('finish', error.message, error.stack);
    return;
  }

  await util.reportActionSucceeded('finish');
}

// Top-level safety net: any rejection not handled inside run() still fails the action.
run().catch(e => {
  core.setFailed("analyze action failed: " + e);
  console.log(e);
});
|
||||
180
src/fingerprints.test.ts
Normal file
180
src/fingerprints.test.ts
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
import * as fs from 'fs';
|
||||
|
||||
import * as fingerprints from './fingerprints';
|
||||
|
||||
function testHash(input: string, expectedHashes: string[]) {
|
||||
let index = 0;
|
||||
let callback = function (lineNumber: number, hash: string) {
|
||||
expect(lineNumber).toBe(index + 1);
|
||||
expect(hash).toBe(expectedHashes[index]);
|
||||
index++;
|
||||
};
|
||||
fingerprints.hash(callback, input);
|
||||
expect(index).toBe(input.split(/\r\n|\r|\n/).length);
|
||||
}
|
||||
|
||||
test('hash', () => {
|
||||
// Try empty file
|
||||
testHash("", ["c129715d7a2bc9a3:1"]);
|
||||
|
||||
// Try various combinations of newline characters
|
||||
testHash(
|
||||
" a\nb\n \t\tc\n d",
|
||||
[
|
||||
"271789c17abda88f:1",
|
||||
"54703d4cd895b18:1",
|
||||
"180aee12dab6264:1",
|
||||
"a23a3dc5e078b07b:1"
|
||||
]);
|
||||
testHash(
|
||||
" hello; \t\nworld!!!\n\n\n \t\tGreetings\n End",
|
||||
[
|
||||
"8b7cf3e952e7aeb2:1",
|
||||
"b1ae1287ec4718d9:1",
|
||||
"bff680108adb0fcc:1",
|
||||
"c6805c5e1288b612:1",
|
||||
"b86d3392aea1be30:1",
|
||||
"e6ceba753e1a442:1",
|
||||
]);
|
||||
testHash(
|
||||
" hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n",
|
||||
[
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(
|
||||
" hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r",
|
||||
[
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(
|
||||
" hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n",
|
||||
[
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
testHash(
|
||||
" hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n",
|
||||
[
|
||||
"e9496ae3ebfced30:1",
|
||||
"fb7c023a8b9ccb3f:1",
|
||||
"ce8ba1a563dcdaca:1",
|
||||
"e20e36e16fcb0cc8:1",
|
||||
"b3edc88f2938467e:1",
|
||||
"c8e28b0b4002a3a0:1",
|
||||
"c129715d7a2bc9a3:1",
|
||||
]);
|
||||
|
||||
// Try repeating line that will generate identical hashes
|
||||
testHash(
|
||||
"Lorem ipsum dolor sit amet.\n".repeat(10),
|
||||
[
|
||||
"a7f2ff13bc495cf2:1",
|
||||
"a7f2ff13bc495cf2:2",
|
||||
"a7f2ff13bc495cf2:3",
|
||||
"a7f2ff13bc495cf2:4",
|
||||
"a7f2ff13bc495cf2:5",
|
||||
"a7f2ff13bc495cf2:6",
|
||||
"a7f2ff1481e87703:1",
|
||||
"a9cf91f7bbf1862b:1",
|
||||
"55ec222b86bcae53:1",
|
||||
"cc97dc7b1d7d8f7b:1",
|
||||
"c129715d7a2bc9a3:1"
|
||||
]);
|
||||
});
|
||||
|
||||
function testResolveUriToFile(uri: any, index: any, artifactsURIs: any[]) {
|
||||
const location = { "uri": uri, "index": index };
|
||||
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
|
||||
return fingerprints.resolveUriToFile(location, artifacts);
|
||||
}
|
||||
|
||||
test('resolveUriToFile', () => {
|
||||
// The resolveUriToFile method checks that the file exists and is in the right directory
|
||||
// so we need to give it real files to look at. We will use this file as an example.
|
||||
// For this to work we require the current working directory to be a parent, but this
|
||||
// should generally always be the case so this is fine.
|
||||
const cwd = process.cwd();
|
||||
const filepath = __filename;
|
||||
expect(filepath.startsWith(cwd + '/')).toBeTruthy();
|
||||
const relativeFilepaht = filepath.substring(cwd.length + 1);
|
||||
|
||||
process.env['GITHUB_WORKSPACE'] = cwd;
|
||||
|
||||
// Absolute paths are unmodified
|
||||
expect(testResolveUriToFile(filepath, undefined, [])).toBe(filepath);
|
||||
expect(testResolveUriToFile('file://' + filepath, undefined, [])).toBe(filepath);
|
||||
|
||||
// Relative paths are made absolute
|
||||
expect(testResolveUriToFile(relativeFilepaht, undefined, [])).toBe(filepath);
|
||||
expect(testResolveUriToFile('file://' + relativeFilepaht, undefined, [])).toBe(filepath);
|
||||
|
||||
// Absolute paths outside the src root are discarded
|
||||
expect(testResolveUriToFile('/src/foo/bar.js', undefined, [])).toBe(undefined);
|
||||
expect(testResolveUriToFile('file:///src/foo/bar.js', undefined, [])).toBe(undefined);
|
||||
|
||||
// Other schemes are discarded
|
||||
expect(testResolveUriToFile('https://' + filepath, undefined, [])).toBe(undefined);
|
||||
expect(testResolveUriToFile('ftp://' + filepath, undefined, [])).toBe(undefined);
|
||||
|
||||
// Invalid URIs are discarded
|
||||
expect(testResolveUriToFile(1, undefined, [])).toBe(undefined);
|
||||
expect(testResolveUriToFile(undefined, undefined, [])).toBe(undefined);
|
||||
|
||||
// Non-existant files are discarded
|
||||
expect(testResolveUriToFile(filepath + '2', undefined, [])).toBe(undefined);
|
||||
|
||||
// Index is resolved
|
||||
expect(testResolveUriToFile(undefined, 0, [filepath])).toBe(filepath);
|
||||
expect(testResolveUriToFile(undefined, 1, ['foo', filepath])).toBe(filepath);
|
||||
|
||||
// Invalid indexes are discarded
|
||||
expect(testResolveUriToFile(undefined, 1, [filepath])).toBe(undefined);
|
||||
expect(testResolveUriToFile(undefined, '0', [filepath])).toBe(undefined);
|
||||
});
|
||||
|
||||
test('addFingerprints', () => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs.readFileSync(__dirname + '/testdata/fingerprinting.input.sarif').toString();
|
||||
let expected = fs.readFileSync(__dirname + '/testdata/fingerprinting.expected.sarif').toString();
|
||||
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = __dirname + '/testdata';
|
||||
|
||||
expect(fingerprints.addFingerprints(input)).toBe(expected);
|
||||
});
|
||||
|
||||
test('missingRegions', () => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs.readFileSync(__dirname + '/testdata/fingerprinting2.input.sarif').toString();
|
||||
let expected = fs.readFileSync(__dirname + '/testdata/fingerprinting2.expected.sarif').toString();
|
||||
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
process.env['GITHUB_WORKSPACE'] = __dirname + '/testdata';
|
||||
|
||||
expect(fingerprints.addFingerprints(input)).toBe(expected);
|
||||
});
|
||||
260
src/fingerprints.ts
Normal file
260
src/fingerprints.ts
Normal file
|
|
@ -0,0 +1,260 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as fs from 'fs';
|
||||
import Long from 'long';
|
||||
|
||||
// Character codes used when normalizing whitespace and line endings.
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
const cr = '\r'.charCodeAt(0);
// Number of significant (non-space/tab) characters contributing to each line's hash.
const BLOCK_SIZE = 100;
// Multiplier for the rolling hash, as a 64-bit Long.
const MOD = Long.fromInt(37); // L

// Compute the starting point for the hash mod:
// MOD^BLOCK_SIZE, used to subtract the oldest character out of the rolling hash.
function computeFirstMod(): Long {
  let firstMod = Long.ONE; // L
  for (let i = 0; i < BLOCK_SIZE; i++) {
    firstMod = firstMod.multiply(MOD);
  }
  return firstMod;
}
|
||||
|
||||
// Type signature of callback passed to hash function.
// Will be called with the line number (1-based) and hash for every line.
type hashCallback = (lineNumber: number, hash: string) => void;

/**
 * Hash the contents of a file
 *
 * The hash method computes a rolling hash for every line in the input. The hash is computed using the first
 * BLOCK_SIZE non-space/tab characters counted from the start of the line. For the computation of the hash all
 * line endings (i.e. \r, \n, and \r\n) are normalized to '\n'. A special value (-1) is added at the end of the
 * file followed by enough '\0' characters to ensure that there are BLOCK_SIZE characters available for computing
 * the hashes of the lines near the end of the file.
 *
 * @param callback function that is called with the line number (1-based) and hash for every line
 * @param input The file's contents
 */
export function hash(callback: hashCallback, input: string) {
  // A rolling view in to the input
  const window = Array(BLOCK_SIZE).fill(0);

  // If the character in the window is the start of a new line
  // then records the line number, otherwise will be -1.
  // Indexes match up with those from the window variable.
  const lineNumbers = Array(BLOCK_SIZE).fill(-1);

  // The current hash value, updated as we read each character
  let hash = Long.ZERO;
  let firstMod = computeFirstMod();

  // The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
  let index = 0;
  // The line number of the character we are currently processing from the input
  let lineNumber = 0;
  // Is the next character to be read the start of a new line
  let lineStart = true;
  // Was the previous character a CR (carriage return)
  let prevCR = false;
  // A map of hashes we've seen before and how many times,
  // so we can disambiguate identical hashes
  const hashCounts: { [hashValue: string]: number } = {};

  // Output the current hash and line number to the callback function.
  // The hash emitted is "<hex hash>:<occurrence count>" so that identical
  // lines still get distinct fingerprints.
  const outputHash = function () {
    let hashValue = hash.toUnsigned().toString(16);
    if (!hashCounts[hashValue]) {
      hashCounts[hashValue] = 0;
    }
    hashCounts[hashValue]++;
    callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
    // Mark this slot as consumed so the same line is not emitted twice.
    lineNumbers[index] = -1;
  };

  // Update the current hash value and increment the index in the window:
  // add the incoming character and subtract the outgoing one (scaled by
  // firstMod = MOD^BLOCK_SIZE) to keep a fixed-width rolling hash.
  const updateHash = function (current: number) {
    const begin = window[index];
    window[index] = current;
    hash = MOD.multiply(hash)
      .add(Long.fromInt(current))
      .subtract(firstMod.multiply(Long.fromInt(begin)));

    index = (index + 1) % BLOCK_SIZE;
  };

  // First process every character in the input, updating the hash and lineNumbers
  // as we go. Once we reach a point in the window again then we've processed
  // BLOCK_SIZE characters and if the last character at this point in the window
  // was the start of a line then we should output the hash for that line.
  for (let i = 0, len = input.length; i <= len; i++) {
    // 65535 is the sentinel appended after the last real character
    // (the "special value" described in the doc comment above).
    let current = i === len ? 65535 : input.charCodeAt(i);
    // skip tabs, spaces, and line feeds that come directly after a carriage return
    if (current === space || current === tab || (prevCR && current === lf)) {
      prevCR = false;
      continue;
    }
    // replace CR with LF so all line-ending conventions hash identically
    if (current === cr) {
      current = lf;
      prevCR = true;
    } else {
      prevCR = false;
    }
    if (lineNumbers[index] !== -1) {
      outputHash();
    }
    if (lineStart) {
      lineStart = false;
      lineNumber++;
      lineNumbers[index] = lineNumber;
    }
    if (current === lf) {
      lineStart = true;
    }
    updateHash(current);
  }

  // Flush the remaining lines by feeding '\0' padding through the window.
  for (let i = 0; i < BLOCK_SIZE; i++) {
    if (lineNumbers[index] !== -1) {
      outputHash();
    }
    updateHash(0);
  }
}
|
||||
|
||||
// Generate a hash callback function that updates the given result in-place
|
||||
// when it recieves a hash for the correct line number. Ignores hashes for other lines.
|
||||
function locationUpdateCallback(result: any, location: any): hashCallback {
|
||||
let locationStartLine = location.physicalLocation?.region?.startLine;
|
||||
if (locationStartLine === undefined) {
|
||||
// We expect the region section to be present, but it can be absent if the
|
||||
// alert pertains to the entire file. In this case, we compute the fingerprint
|
||||
// using the hash of the first line of the file.
|
||||
locationStartLine = 1;
|
||||
}
|
||||
return function (lineNumber: number, hash: string) {
|
||||
// Ignore hashes for lines that don't concern us
|
||||
if (locationStartLine !== lineNumber) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!result.partialFingerprints) {
|
||||
result.partialFingerprints = {};
|
||||
}
|
||||
const existingFingerprint = result.partialFingerprints.primaryLocationLineHash;
|
||||
|
||||
// If the hash doesn't match the existing fingerprint then
|
||||
// output a warning and don't overwrite it.
|
||||
if (!existingFingerprint) {
|
||||
result.partialFingerprints.primaryLocationLineHash = hash;
|
||||
} else if (existingFingerprint !== hash) {
|
||||
core.warning("Calculated fingerprint of " + hash +
|
||||
" for file " + location.physicalLocation.artifactLocation.uri +
|
||||
" line " + lineNumber +
|
||||
", but found existing inconsistent fingerprint value " + existingFingerprint);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Can we fingerprint the given location. This requires access to
|
||||
// the source file so we can hash it.
|
||||
// If possible returns a absolute file path for the source file,
|
||||
// or if not possible then returns undefined.
|
||||
export function resolveUriToFile(location: any, artifacts: any[]): string | undefined {
|
||||
// This may be referencing an artifact
|
||||
if (!location.uri && location.index !== undefined) {
|
||||
if (typeof location.index !== 'number' ||
|
||||
location.index < 0 ||
|
||||
location.index >= artifacts.length ||
|
||||
typeof artifacts[location.index].location !== 'object') {
|
||||
core.debug('Ignoring location as index "' + location.index + '" is invalid');
|
||||
return undefined;
|
||||
}
|
||||
location = artifacts[location.index].location;
|
||||
}
|
||||
|
||||
// Get the URI and decode
|
||||
if (typeof location.uri !== 'string') {
|
||||
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
|
||||
return undefined;
|
||||
}
|
||||
let uri = decodeURIComponent(location.uri);
|
||||
|
||||
// Remove a file scheme, and abort if the scheme is anything else
|
||||
const fileUriPrefix = 'file://';
|
||||
if (uri.startsWith(fileUriPrefix)) {
|
||||
uri = uri.substring(fileUriPrefix.length);
|
||||
}
|
||||
if (uri.indexOf('://') !== -1) {
|
||||
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Discard any absolute paths that aren't in the src root
|
||||
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
|
||||
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
|
||||
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Just assume a relative path is relative to the src root.
|
||||
// This is not necessarily true but should be a good approximation
|
||||
// and here we likely want to err on the side of handling more cases.
|
||||
if (!uri.startsWith('/')) {
|
||||
uri = srcRootPrefix + uri;
|
||||
}
|
||||
|
||||
// Check the file exists
|
||||
if (!fs.existsSync(uri)) {
|
||||
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return uri;
|
||||
}
|
||||
|
||||
// Compute fingerprints for results in the given sarif file
|
||||
// and return an updated sarif file contents.
|
||||
export function addFingerprints(sarifContents: string): string {
|
||||
let sarif = JSON.parse(sarifContents);
|
||||
|
||||
// Gather together results for the same file and construct
|
||||
// callbacks to accept hashes for that file and update the location
|
||||
const callbacksByFile: { [filename: string]: hashCallback[] } = {};
|
||||
for (const run of sarif.runs || []) {
|
||||
// We may need the list of artifacts to resolve against
|
||||
let artifacts = run.artifacts || [];
|
||||
|
||||
for (const result of run.results || []) {
|
||||
// Check the primary location is defined correctly and is in the src root
|
||||
const primaryLocation = (result.locations || [])[0];
|
||||
if (!primaryLocation ||
|
||||
!primaryLocation.physicalLocation ||
|
||||
!primaryLocation.physicalLocation.artifactLocation) {
|
||||
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
|
||||
continue;
|
||||
}
|
||||
|
||||
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
|
||||
if (!filepath) {
|
||||
continue;
|
||||
}
|
||||
if (!callbacksByFile[filepath]) {
|
||||
callbacksByFile[filepath] = [];
|
||||
}
|
||||
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
|
||||
}
|
||||
}
|
||||
|
||||
// Now hash each file that was found
|
||||
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
|
||||
// A callback that forwards the hash to all other callbacks for that file
|
||||
const teeCallback = function (lineNumber: number, hash: string) {
|
||||
Object.values(callbacks).forEach(c => c(lineNumber, hash));
|
||||
};
|
||||
const fileContents = fs.readFileSync(filepath).toString();
|
||||
hash(teeCallback, fileContents);
|
||||
});
|
||||
|
||||
return JSON.stringify(sarif);
|
||||
}
|
||||
9
src/inject-tracer.ps1
Normal file
9
src/inject-tracer.ps1
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
# Injects the CodeQL build tracer into the running Actions worker process.
# Usage: inject-tracer.ps1 <path-to-tracer-executable>
Param(
    [Parameter(Position=0)]
    [String]
    $tracer
)

# List the worker process first so it is visible in the step log.
Get-Process -Name Runner.Worker

$workerProcess = Get-Process -Name Runner.Worker
$workerId = $workerProcess.Id
Invoke-Expression "&$tracer --inject=$workerId"
|
||||
44
src/setup-tools.ts
Normal file
44
src/setup-tools.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as toolcache from '@actions/tool-cache';
|
||||
import * as path from 'path';
|
||||
|
||||
export class CodeQLSetup {
|
||||
public dist: string;
|
||||
public tools: string;
|
||||
public cmd: string;
|
||||
public platform: string;
|
||||
|
||||
constructor(codeqlDist: string) {
|
||||
this.dist = codeqlDist;
|
||||
this.tools = path.join(this.dist, 'tools');
|
||||
this.cmd = path.join(codeqlDist, 'codeql');
|
||||
// TODO check process.arch ?
|
||||
if (process.platform === 'win32') {
|
||||
this.platform = 'win64';
|
||||
if (this.cmd.endsWith('codeql')) {
|
||||
this.cmd += ".cmd";
|
||||
}
|
||||
} else if (process.platform === 'linux') {
|
||||
this.platform = 'linux64';
|
||||
} else if (process.platform === 'darwin') {
|
||||
this.platform = 'osx64';
|
||||
} else {
|
||||
throw new Error("Unsupported plaform: " + process.platform);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function setupCodeQL(): Promise<CodeQLSetup> {
|
||||
const version = '1.0.0';
|
||||
const codeqlURL = core.getInput('tools', { required: true });
|
||||
|
||||
let codeqlFolder = toolcache.find('CodeQL', version);
|
||||
if (codeqlFolder) {
|
||||
core.debug(`CodeQL found in cache ${codeqlFolder}`);
|
||||
} else {
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
|
||||
}
|
||||
return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
|
||||
}
|
||||
248
src/setup-tracer.ts
Normal file
248
src/setup-tracer.ts
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
import * as io from '@actions/io';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as analysisPaths from './analysis-paths';
|
||||
import * as configUtils from './config-utils';
|
||||
import * as setuptools from './setup-tools';
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as util from './util';
|
||||
|
||||
type TracerConfig = {
|
||||
spec: string;
|
||||
env: { [key: string]: string };
|
||||
};
|
||||
|
||||
const CRITICAL_TRACER_VARS = new Set(
|
||||
['SEMMLE_PRELOAD_libtrace',
|
||||
, 'SEMMLE_RUNNER',
|
||||
, 'SEMMLE_COPY_EXECUTABLES_ROOT',
|
||||
, 'SEMMLE_DEPTRACE_SOCKET',
|
||||
, 'SEMMLE_JAVA_TOOL_OPTIONS'
|
||||
]);
|
||||
|
||||
async function tracerConfig(
|
||||
codeql: setuptools.CodeQLSetup,
|
||||
database: string,
|
||||
compilerSpec?: string): Promise<TracerConfig> {
|
||||
|
||||
const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];
|
||||
|
||||
let envFile = path.resolve(database, 'working', 'env.tmp');
|
||||
await exec.exec(codeql.cmd, ['database', 'trace-command', database,
|
||||
...compilerSpecArg,
|
||||
process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]
|
||||
);
|
||||
|
||||
const env: { [key: string]: string } = JSON.parse(fs.readFileSync(envFile, 'utf-8'));
|
||||
|
||||
const config = env['ODASA_TRACER_CONFIGURATION'];
|
||||
const info: TracerConfig = { spec: config, env: {} };
|
||||
|
||||
// Extract critical tracer variables from the environment
|
||||
for (let entry of Object.entries(env)) {
|
||||
const key = entry[0];
|
||||
const value = entry[1];
|
||||
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
|
||||
if (key === 'ODASA_TRACER_CONFIGURATION') {
|
||||
continue;
|
||||
}
|
||||
// skip undefined values
|
||||
if (typeof value === 'undefined') {
|
||||
continue;
|
||||
}
|
||||
// Keep variables that do not exist in current environment. In addition always keep
|
||||
// critical and CODEQL_ variables
|
||||
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
|
||||
info.env[key] = value;
|
||||
}
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerConfig {
|
||||
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
|
||||
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
|
||||
|
||||
// Merge the environments
|
||||
const env: { [key: string]: string; } = {};
|
||||
let copyExecutables = false;
|
||||
let envSize = 0;
|
||||
for (let v of Object.values(configs)) {
|
||||
for (let e of Object.entries(v.env)) {
|
||||
const name = e[0];
|
||||
const value = e[1];
|
||||
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
|
||||
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
|
||||
copyExecutables = true;
|
||||
} else if (name in env) {
|
||||
if (env[name] !== value) {
|
||||
throw Error('Incompatible values in environment parameter ' +
|
||||
name + ': ' + env[name] + ' and ' + value);
|
||||
}
|
||||
} else {
|
||||
env[name] = value;
|
||||
envSize += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Concatenate spec files into a new spec file
|
||||
let languages = Object.keys(configs);
|
||||
const cppIndex = languages.indexOf('cpp');
|
||||
// Make sure cpp is the last language, if it's present since it must be concatenated last
|
||||
if (cppIndex !== -1) {
|
||||
let lastLang = languages[languages.length - 1];
|
||||
languages[languages.length - 1] = languages[cppIndex];
|
||||
languages[cppIndex] = lastLang;
|
||||
}
|
||||
|
||||
let totalLines: string[] = [];
|
||||
let totalCount = 0;
|
||||
for (let lang of languages) {
|
||||
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
|
||||
const count = parseInt(lines[1], 10);
|
||||
totalCount += count;
|
||||
totalLines.push(...lines.slice(2));
|
||||
}
|
||||
|
||||
const newLogFilePath = path.resolve(util.workspaceFolder(), 'compound-build-tracer.log');
|
||||
const spec = path.resolve(util.workspaceFolder(), 'compound-spec');
|
||||
const tempFolder = path.resolve(util.workspaceFolder(), 'compound-temp');
|
||||
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
|
||||
|
||||
if (copyExecutables) {
|
||||
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = tempFolder;
|
||||
envSize += 1;
|
||||
}
|
||||
|
||||
fs.writeFileSync(spec, newSpecContent.join('\n'));
|
||||
|
||||
// Prepare the content of the compound environment file
|
||||
let buffer = Buffer.alloc(4);
|
||||
buffer.writeInt32LE(envSize, 0);
|
||||
for (let e of Object.entries(env)) {
|
||||
const key = e[0];
|
||||
const value = e[1];
|
||||
const lineBuffer = new Buffer(key + '=' + value + '\0', 'utf8');
|
||||
const sizeBuffer = Buffer.alloc(4);
|
||||
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
|
||||
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
|
||||
}
|
||||
// Write the compound environment
|
||||
const envPath = spec + '.environment';
|
||||
fs.writeFileSync(envPath, buffer);
|
||||
|
||||
return { env, spec };
|
||||
}
|
||||
|
||||
|
||||
|
||||
async function run() {
|
||||
try {
|
||||
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// The config file MUST be parsed in the init action
|
||||
const config = await configUtils.loadConfig();
|
||||
|
||||
core.startGroup('Load language configuration');
|
||||
|
||||
const languages = await util.getLanguages();
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
if (languages.length === 0) {
|
||||
core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
|
||||
return;
|
||||
}
|
||||
|
||||
core.endGroup();
|
||||
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
|
||||
|
||||
const sourceRoot = path.resolve();
|
||||
|
||||
core.startGroup('Setup CodeQL tools');
|
||||
const codeqlSetup = await setuptools.setupCodeQL();
|
||||
await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
|
||||
core.endGroup();
|
||||
|
||||
// Forward Go flags
|
||||
const goFlags = process.env['GOFLAGS'];
|
||||
if (goFlags) {
|
||||
core.exportVariable('GOFLAGS', goFlags);
|
||||
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
|
||||
}
|
||||
|
||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
||||
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
|
||||
core.exportVariable('CODEQL_RAM', codeqlRam);
|
||||
|
||||
const databaseFolder = path.resolve(util.workspaceFolder(), 'codeql_databases');
|
||||
await io.mkdirP(databaseFolder);
|
||||
|
||||
let tracedLanguages: { [key: string]: TracerConfig } = {};
|
||||
let scannedLanguages: string[] = [];
|
||||
|
||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
||||
for (let language of languages) {
|
||||
const languageDatabase = path.join(databaseFolder, language);
|
||||
|
||||
// Init language database
|
||||
await exec.exec(codeqlSetup.cmd, ['database', 'init', languageDatabase, '--language=' + language, '--source-root=' + sourceRoot]);
|
||||
// TODO: add better detection of 'traced languages' instead of using a hard coded list
|
||||
if (['cpp', 'java', 'csharp'].includes(language)) {
|
||||
const config: TracerConfig = await tracerConfig(codeqlSetup, languageDatabase);
|
||||
tracedLanguages[language] = config;
|
||||
} else {
|
||||
scannedLanguages.push(language);
|
||||
}
|
||||
}
|
||||
const tracedLanguageKeys = Object.keys(tracedLanguages);
|
||||
if (tracedLanguageKeys.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
|
||||
if (mainTracerConfig.spec) {
|
||||
for (let entry of Object.entries(mainTracerConfig.env)) {
|
||||
core.exportVariable(entry[0], entry[1]);
|
||||
}
|
||||
|
||||
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
|
||||
if (process.platform === 'darwin') {
|
||||
core.exportVariable(
|
||||
'DYLD_INSERT_LIBRARIES',
|
||||
path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
|
||||
} else if (process.platform === 'win32') {
|
||||
await exec.exec(
|
||||
'powershell',
|
||||
[path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
|
||||
path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe')],
|
||||
{ env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
|
||||
} else {
|
||||
core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
|
||||
|
||||
// TODO: make this a "private" environment variable of the action
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);
|
||||
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
return;
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
await util.reportActionSucceeded('init');
|
||||
}
|
||||
|
||||
run().catch(e => {
|
||||
core.setFailed("init action failed: " + e);
|
||||
console.log(e);
|
||||
});
|
||||
14
src/shared-environment.ts
Normal file
14
src/shared-environment.ts
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
export const CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
|
||||
export const CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
|
||||
export const CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
|
||||
export const ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
|
||||
export const CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
|
||||
export const CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
|
||||
// The time at which the first action (normally init) started executing.
|
||||
// If a workflow invokes a different action without first invoking the init
|
||||
// action (i.e. the upload action is being used by a third-party integrator)
|
||||
// then this variable will be assigned the start time of the action invoked
|
||||
// rather that the init action.
|
||||
export const CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
|
||||
// Populated when the init action completes successfully
|
||||
export const CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
|
||||
92
src/testdata/fingerprinting.expected.sarif
vendored
Normal file
92
src/testdata/fingerprinting.expected.sarif
vendored
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
{
|
||||
"$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
|
||||
"version": "2.1.0",
|
||||
"runs": [
|
||||
{
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "CodeQL command-line toolchain",
|
||||
"organization": "GitHub",
|
||||
"semanticVersion": "2.0.0",
|
||||
"rules": []
|
||||
}
|
||||
},
|
||||
"artifacts": [
|
||||
{
|
||||
"location": {
|
||||
"uri": "testFile1.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"location": {
|
||||
"uri": "testFile2.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"ruleId": "js/unused-local-variable",
|
||||
"ruleIndex": 0,
|
||||
"message": {
|
||||
"text": "Unused variable a."
|
||||
},
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "testFile1.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 0
|
||||
},
|
||||
"region": {
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endColumn": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"partialFingerprints": {
|
||||
"primaryLocationLineHash": "5e4d5a9cf1294ad9:1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"ruleId": "js/unused-local-variable",
|
||||
"ruleIndex": 0,
|
||||
"message": {
|
||||
"text": "Unused variable bar."
|
||||
},
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"index": 1
|
||||
},
|
||||
"region": {
|
||||
"startLine": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"partialFingerprints": {
|
||||
"primaryLocationLineHash": "f7592a95a9381ac0:1"
|
||||
}
|
||||
}
|
||||
],
|
||||
"newlineSequences": [
|
||||
"\r\n",
|
||||
"\n",
|
||||
"
",
|
||||
"
"
|
||||
],
|
||||
"columnKind": "utf16CodeUnits",
|
||||
"properties": {
|
||||
"semmle.formatSpecifier": "sarif-latest"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
86
src/testdata/fingerprinting.input.sarif
vendored
Normal file
86
src/testdata/fingerprinting.input.sarif
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
{
|
||||
"$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
|
||||
"version": "2.1.0",
|
||||
"runs": [
|
||||
{
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "CodeQL command-line toolchain",
|
||||
"organization": "GitHub",
|
||||
"semanticVersion": "2.0.0",
|
||||
"rules": []
|
||||
}
|
||||
},
|
||||
"artifacts": [
|
||||
{
|
||||
"location": {
|
||||
"uri": "testFile1.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"location": {
|
||||
"uri": "testFile2.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"results": [
|
||||
{
|
||||
"ruleId": "js/unused-local-variable",
|
||||
"ruleIndex": 0,
|
||||
"message": {
|
||||
"text": "Unused variable a."
|
||||
},
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": "testFile1.js",
|
||||
"uriBaseId": "%SRCROOT%",
|
||||
"index": 0
|
||||
},
|
||||
"region": {
|
||||
"startLine": 1,
|
||||
"startColumn": 1,
|
||||
"endColumn": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"ruleId": "js/unused-local-variable",
|
||||
"ruleIndex": 0,
|
||||
"message": {
|
||||
"text": "Unused variable bar."
|
||||
},
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"index": 1
|
||||
},
|
||||
"region": {
|
||||
"startLine": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"newlineSequences": [
|
||||
"\r\n",
|
||||
"\n",
|
||||
"
",
|
||||
"
"
|
||||
],
|
||||
"columnKind": "utf16CodeUnits",
|
||||
"properties": {
|
||||
"semmle.formatSpecifier": "sarif-latest"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
77
src/testdata/fingerprinting2.expected.sarif
vendored
Normal file
77
src/testdata/fingerprinting2.expected.sarif
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
{
|
||||
"runs": [
|
||||
{
|
||||
"artifacts": [
|
||||
{
|
||||
"location": {
|
||||
"index": 0,
|
||||
"uri": "src/ex_cmds.h",
|
||||
"uriBaseId": "%SRCROOT%"
|
||||
}
|
||||
}
|
||||
],
|
||||
"columnKind": "utf16CodeUnits",
|
||||
"properties": {
|
||||
"semmle.formatSpecifier": "sarif-latest"
|
||||
},
|
||||
"results": [
|
||||
{
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"index": 0,
|
||||
"uri": "no_header_guard.h",
|
||||
"uriBaseId": "%SRCROOT%"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "This header file should contain a header guard to prevent multiple inclusion."
|
||||
},
|
||||
"partialFingerprints": {
|
||||
"primaryLocationLineHash": "599c824c91d0f75e:1"
|
||||
},
|
||||
"ruleId": "cpp/missing-header-guard",
|
||||
"ruleIndex": 0
|
||||
}
|
||||
],
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "CodeQL command-line toolchain",
|
||||
"organization": "GitHub",
|
||||
"rules": [
|
||||
{
|
||||
"defaultConfiguration": {},
|
||||
"fullDescription": {
|
||||
"text": "Header files should contain header guards (#defines to prevent the file from being included twice). This prevents errors and inefficiencies caused by repeated inclusion."
|
||||
},
|
||||
"id": "cpp/missing-header-guard",
|
||||
"name": "cpp/missing-header-guard",
|
||||
"properties": {
|
||||
"description": "Header files should contain header guards (#defines to prevent\n the file from being included twice). This prevents errors and\n inefficiencies caused by repeated inclusion.",
|
||||
"id": "cpp/missing-header-guard",
|
||||
"kind": "problem",
|
||||
"name": "Missing header guard",
|
||||
"precision": "high",
|
||||
"problem.severity": "warning",
|
||||
"tags": [
|
||||
"efficiency",
|
||||
"maintainability",
|
||||
"modularity",
|
||||
"external/jsf"
|
||||
]
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "Missing header guard"
|
||||
}
|
||||
}
|
||||
],
|
||||
"semanticVersion": "2.0.0+202002031536"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "2.1.0"
|
||||
}
|
||||
76
src/testdata/fingerprinting2.input.sarif
vendored
Normal file
76
src/testdata/fingerprinting2.input.sarif
vendored
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"runs": [
|
||||
{
|
||||
"artifacts": [
|
||||
{
|
||||
"location": {
|
||||
"index": 0,
|
||||
"uri": "src/ex_cmds.h",
|
||||
"uriBaseId": "%SRCROOT%"
|
||||
}
|
||||
}
|
||||
],
|
||||
"columnKind": "utf16CodeUnits",
|
||||
"properties": {
|
||||
"semmle.formatSpecifier": "sarif-latest"
|
||||
},
|
||||
"results": [
|
||||
{
|
||||
"locations": [
|
||||
{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"index": 0,
|
||||
"uri": "no_header_guard.h",
|
||||
"uriBaseId": "%SRCROOT%"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"message": {
|
||||
"text": "This header file should contain a header guard to prevent multiple inclusion."
|
||||
},
|
||||
"partialFingerprints": {
|
||||
},
|
||||
"ruleId": "cpp/missing-header-guard",
|
||||
"ruleIndex": 0
|
||||
}
|
||||
],
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "CodeQL command-line toolchain",
|
||||
"organization": "GitHub",
|
||||
"rules": [
|
||||
{
|
||||
"defaultConfiguration": {},
|
||||
"fullDescription": {
|
||||
"text": "Header files should contain header guards (#defines to prevent the file from being included twice). This prevents errors and inefficiencies caused by repeated inclusion."
|
||||
},
|
||||
"id": "cpp/missing-header-guard",
|
||||
"name": "cpp/missing-header-guard",
|
||||
"properties": {
|
||||
"description": "Header files should contain header guards (#defines to prevent\n the file from being included twice). This prevents errors and\n inefficiencies caused by repeated inclusion.",
|
||||
"id": "cpp/missing-header-guard",
|
||||
"kind": "problem",
|
||||
"name": "Missing header guard",
|
||||
"precision": "high",
|
||||
"problem.severity": "warning",
|
||||
"tags": [
|
||||
"efficiency",
|
||||
"maintainability",
|
||||
"modularity",
|
||||
"external/jsf"
|
||||
]
|
||||
},
|
||||
"shortDescription": {
|
||||
"text": "Missing header guard"
|
||||
}
|
||||
}
|
||||
],
|
||||
"semanticVersion": "2.0.0+202002031536"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"version": "2.1.0"
|
||||
}
|
||||
5
src/testdata/no_header_guard.h
vendored
Normal file
5
src/testdata/no_header_guard.h
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
/*
|
||||
* Blah blah
|
||||
*/
|
||||
|
||||
#define BLAH 1234
|
||||
4
src/testdata/testFile1.js
vendored
Normal file
4
src/testdata/testFile1.js
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
var a = 0;
|
||||
var b = 0;
|
||||
var c = 0;
|
||||
var d = 0;
|
||||
4
src/testdata/testFile2.js
vendored
Normal file
4
src/testdata/testFile2.js
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
var foo = 0;
|
||||
var bar = 0;
|
||||
var baz = 0;
|
||||
var qux = 0;
|
||||
13
src/tracer-env.ts
Normal file
13
src/tracer-env.ts
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import * as fs from 'fs';
|
||||
|
||||
const env = {};
|
||||
for (let entry of Object.entries(process.env)) {
|
||||
const key = entry[0];
|
||||
const value = entry[1];
|
||||
if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
|
||||
env[key] = value;
|
||||
}
|
||||
}
|
||||
process.stdout.write(process.argv[2]);
|
||||
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
|
||||
|
||||
139
src/upload-lib.ts
Normal file
139
src/upload-lib.ts
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as http from '@actions/http-client';
|
||||
import * as auth from '@actions/http-client/auth';
|
||||
import * as io from '@actions/io';
|
||||
import fileUrl from 'file-url';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import zlib from 'zlib';
|
||||
|
||||
import * as fingerprints from './fingerprints';
|
||||
import * as sharedEnv from './shared-environment';
|
||||
import * as util from './util';
|
||||
|
||||
// Construct the location of the sentinel file for detecting multiple uploads.
|
||||
// The returned location should be writable.
|
||||
async function getSentinelFilePath(): Promise<string> {
|
||||
// Use the temp dir instead of placing next to the sarif file because of
|
||||
// issues with docker actions. The directory containing the sarif file
|
||||
// may not be writable by us.
|
||||
const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
|
||||
await io.mkdirP(uploadsTmpDir);
|
||||
// Hash the absolute path so we'll behave correctly in the unlikely
|
||||
// scenario a file is referenced twice with different paths.
|
||||
return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
|
||||
}
|
||||
|
||||
// Takes a list of paths to sarif files and combines them together,
|
||||
// returning the contents of the combined sarif file.
|
||||
export function combineSarifFiles(sarifFiles: string[]): string {
|
||||
let combinedSarif = {
|
||||
version: null,
|
||||
runs: [] as any[]
|
||||
};
|
||||
|
||||
for (let sarifFile of sarifFiles) {
|
||||
let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
|
||||
// Check SARIF version
|
||||
if (combinedSarif.version === null) {
|
||||
combinedSarif.version = sarifObject.version;
|
||||
} else if (combinedSarif.version !== sarifObject.version) {
|
||||
throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
|
||||
}
|
||||
|
||||
combinedSarif.runs.push(...sarifObject.runs);
|
||||
}
|
||||
|
||||
return JSON.stringify(combinedSarif);
|
||||
}
|
||||
|
||||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
|
||||
export async function upload(input: string) {
|
||||
if (fs.lstatSync(input).isDirectory()) {
|
||||
const sarifFiles = fs.readdirSync(input)
|
||||
.filter(f => f.endsWith(".sarif"))
|
||||
.map(f => path.resolve(input, f));
|
||||
await uploadFiles(sarifFiles);
|
||||
} else {
|
||||
await uploadFiles([input]);
|
||||
}
|
||||
}
|
||||
|
||||
// Uploads the given set of sarif files.
|
||||
async function uploadFiles(sarifFiles: string[]) {
|
||||
core.startGroup("Uploading results");
|
||||
try {
|
||||
// Check if an upload has happened before. If so then abort.
|
||||
// This is intended to catch when the finish and upload-sarif actions
|
||||
// are used together, and then the upload-sarif action is invoked twice.
|
||||
const sentinelFile = await getSentinelFilePath();
|
||||
if (fs.existsSync(sentinelFile)) {
|
||||
core.info("Aborting as an upload has already happened from this job");
|
||||
return;
|
||||
}
|
||||
|
||||
const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
|
||||
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
|
||||
const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
|
||||
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
|
||||
|
||||
core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
let sarifPayload = combineSarifFiles(sarifFiles);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload);
|
||||
|
||||
const zipped_sarif = zlib.gzipSync(sarifPayload).toString('base64');
|
||||
let checkoutPath = core.getInput('checkout_path');
|
||||
let checkoutURI = fileUrl(checkoutPath);
|
||||
const workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
|
||||
return;
|
||||
}
|
||||
|
||||
let matrix: string | undefined = core.getInput('matrix');
|
||||
if (matrix === "null" || matrix === "") {
|
||||
matrix = undefined;
|
||||
}
|
||||
|
||||
const payload = JSON.stringify({
|
||||
"commit_oid": commitOid,
|
||||
"ref": ref,
|
||||
"analysis_name": analysisName,
|
||||
"sarif": zipped_sarif,
|
||||
"workflow_run_id": workflowRunID,
|
||||
"checkout_uri": checkoutURI,
|
||||
"environment": matrix,
|
||||
"started_at": startedAt
|
||||
});
|
||||
|
||||
core.info('Uploading results');
|
||||
const githubToken = core.getInput('token');
|
||||
const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
|
||||
const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
|
||||
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
|
||||
const res: http.HttpClientResponse = await client.put(url, payload);
|
||||
const requestID = res.message.headers["x-github-request-id"];
|
||||
|
||||
core.debug('response status: ' + res.message.statusCode);
|
||||
if (res.message.statusCode === 500) {
|
||||
// If the upload fails with 500 then we assume it is a temporary problem
|
||||
// with turbo-scan and not an error that the user has caused or can fix.
|
||||
// We avoid marking the job as failed to avoid breaking CI workflows.
|
||||
core.error('Upload failed (' + requestID + '): ' + await res.readBody());
|
||||
} else if (res.message.statusCode !== 202) {
|
||||
core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
|
||||
} else {
|
||||
core.info("Successfully uploaded results");
|
||||
}
|
||||
|
||||
// Mark that we have made an upload
|
||||
fs.writeFileSync(sentinelFile, '');
|
||||
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
core.endGroup();
|
||||
}
|
||||
25
src/upload-sarif.ts
Normal file
25
src/upload-sarif.ts
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import * as core from '@actions/core';
|
||||
|
||||
import * as upload_lib from './upload-lib';
|
||||
import * as util from './util';
|
||||
|
||||
async function run() {
|
||||
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await upload_lib.upload(core.getInput('sarif_file'));
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('upload-sarif', error.message, error.stack);
|
||||
return;
|
||||
}
|
||||
|
||||
await util.reportActionSucceeded('upload-sarif');
|
||||
}
|
||||
|
||||
run().catch(e => {
|
||||
core.setFailed("upload-sarif action failed: " + e);
|
||||
console.log(e);
|
||||
});
|
||||
295
src/util.ts
Normal file
295
src/util.ts
Normal file
|
|
@ -0,0 +1,295 @@
|
|||
import * as core from '@actions/core';
|
||||
import * as http from '@actions/http-client';
|
||||
import * as auth from '@actions/http-client/auth';
|
||||
import * as octokit from '@octokit/rest';
|
||||
import consoleLogLevel from 'console-log-level';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as sharedEnv from './shared-environment';
|
||||
|
||||
/**
|
||||
* Should the current action be aborted?
|
||||
*
|
||||
* This method should be called at the start of all CodeQL actions and they
|
||||
* should abort cleanly if this returns true without failing the action.
|
||||
* This method will call `core.setFailed` if necessary.
|
||||
*/
|
||||
export function should_abort(actionName: string, requireInitActionHasRun: boolean): boolean {
|
||||
|
||||
// Check that required aspects of the environment are present
|
||||
const ref = process.env['GITHUB_REF'];
|
||||
if (ref === undefined) {
|
||||
core.setFailed('GITHUB_REF must be set.');
|
||||
return true;
|
||||
}
|
||||
|
||||
// Should abort if called on a merge commit for a pull request.
|
||||
if (ref.startsWith('refs/pull/')) {
|
||||
core.warning('The CodeQL ' + actionName + ' action is intended for workflows triggered on `push` events, '
|
||||
+ 'but the current workflow is running on a pull request. Aborting.');
|
||||
return true;
|
||||
}
|
||||
|
||||
// If the init action is required, then check the it completed successfully.
|
||||
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
|
||||
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the path to the workspace folder.
|
||||
*/
|
||||
export function workspaceFolder(): string {
|
||||
let workspaceFolder = process.env['RUNNER_WORKSPACE'];
|
||||
if (!workspaceFolder)
|
||||
workspaceFolder = path.resolve('..');
|
||||
|
||||
return workspaceFolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an environment parameter, but throw an error if it is not set.
|
||||
*/
|
||||
export function getRequiredEnvParam(paramName: string): string {
|
||||
const value = process.env[paramName];
|
||||
if (value === undefined) {
|
||||
throw new Error(paramName + ' environment variable must be set');
|
||||
}
|
||||
core.debug(paramName + '=' + value);
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
*/
|
||||
async function getLanguagesInRepo(): Promise<string[]> {
|
||||
// Translate between GitHub's API names for languages and ours
|
||||
const codeqlLanguages = {
|
||||
'C': 'cpp',
|
||||
'C++': 'cpp',
|
||||
'C#': 'csharp',
|
||||
'Go': 'go',
|
||||
'Java': 'java',
|
||||
'JavaScript': 'javascript',
|
||||
'TypeScript': 'javascript',
|
||||
'Python': 'python',
|
||||
};
|
||||
let repo_nwo = process.env['GITHUB_REPOSITORY']?.split("/");
|
||||
if (repo_nwo) {
|
||||
let owner = repo_nwo[0];
|
||||
let repo = repo_nwo[1];
|
||||
|
||||
core.debug(`GitHub repo ${owner} ${repo}`);
|
||||
let ok = new octokit.Octokit({
|
||||
auth: core.getInput('token'),
|
||||
userAgent: "CodeQL Action",
|
||||
log: consoleLogLevel({ level: "debug" })
|
||||
});
|
||||
const response = await ok.request("GET /repos/:owner/:repo/languages", ({
|
||||
owner,
|
||||
repo
|
||||
}));
|
||||
|
||||
core.debug("Languages API response: " + JSON.stringify(response));
|
||||
|
||||
// The GitHub API is going to return languages in order of popularity,
|
||||
// When we pick a language to autobuild we want to pick the most popular traced language
|
||||
// Since sets in javascript maintain insertion order, using a set here and then splatting it
|
||||
// into an array gives us an array of languages ordered by popularity
|
||||
let languages: Set<string> = new Set();
|
||||
for (let lang in response.data) {
|
||||
if (lang in codeqlLanguages) {
|
||||
languages.add(codeqlLanguages[lang]);
|
||||
}
|
||||
}
|
||||
return [...languages];
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the languages to analyse.
|
||||
*
|
||||
* The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
|
||||
* if that has been set, otherwise it is obtained from the action input parameter
|
||||
* 'languages' if that has been set, otherwise it is deduced as all languages in the
|
||||
* repo that can be analysed.
|
||||
*
|
||||
* If the languages are obtained from either of the second choices, the
|
||||
* CODEQL_ACTION_LANGUAGES environment variable will be exported with the
|
||||
* deduced list.
|
||||
*/
|
||||
export async function getLanguages(): Promise<string[]> {
|
||||
|
||||
// Obtain from CODEQL_ACTION_LANGUAGES if set
|
||||
const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
|
||||
if (langsVar) {
|
||||
return langsVar.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
}
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = core.getInput('languages', { required: false })
|
||||
.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
core.info("Languages from configuration: " + JSON.stringify(languages));
|
||||
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo();
|
||||
core.info("Automatically detected languages: " + JSON.stringify(languages));
|
||||
}
|
||||
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
|
||||
|
||||
return languages;
|
||||
}
|
||||
|
||||
interface StatusReport {
|
||||
"workflow_run_id": number;
|
||||
"workflow_name": string;
|
||||
"job_name": string;
|
||||
"matrix_vars"?: string;
|
||||
"languages": string;
|
||||
"commit_oid": string;
|
||||
"action_name": string;
|
||||
"action_oid": string;
|
||||
"started_at": string;
|
||||
"completed_at"?: string;
|
||||
"status": string;
|
||||
"cause"?: string;
|
||||
"exception"?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compose a StatusReport.
|
||||
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReport(
|
||||
actionName: string,
|
||||
status: string,
|
||||
cause?: string,
|
||||
exception?: string):
|
||||
Promise<StatusReport> {
|
||||
|
||||
const commitOid = process.env['GITHUB_SHA'] || '';
|
||||
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
|
||||
let workflowRunID = -1;
|
||||
if (workflowRunIDStr) {
|
||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
}
|
||||
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
|
||||
const jobName = process.env['GITHUB_JOB'] || '';
|
||||
const languages = (await getLanguages()).sort().join(',');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
|
||||
|
||||
let statusReport: StatusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
languages: languages,
|
||||
commit_oid: commitOid,
|
||||
action_name: actionName,
|
||||
action_oid: "unknown", // TODO decide if it's possible to fill this in
|
||||
started_at: startedAt,
|
||||
status: status
|
||||
};
|
||||
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (status === 'success' || status === 'failure') {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
let matrix: string | undefined = core.getInput('matrix');
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
|
||||
return statusReport;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Returns the status code of the response to the status request, or
|
||||
* undefined if the given statusReport is undefined or no response was
|
||||
* received.
|
||||
*/
|
||||
async function sendStatusReport(statusReport: StatusReport): Promise<number | undefined> {
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
|
||||
core.debug('Sending status report: ' + statusReportJSON);
|
||||
|
||||
const githubToken = core.getInput('token');
|
||||
const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
|
||||
const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
|
||||
const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
|
||||
+ '/code-scanning/analysis/status';
|
||||
const res: http.HttpClientResponse = await client.put(url, statusReportJSON);
|
||||
|
||||
return res.message?.statusCode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a status report that an action is starting.
|
||||
*
|
||||
* If the action is `init` then this also records the start time in the environment,
|
||||
* and ensures that the analysed languages are also recorded in the envirenment.
|
||||
*
|
||||
* Returns true unless a problem occurred and the action should abort.
|
||||
*/
|
||||
export async function reportActionStarting(action: string): Promise<boolean> {
|
||||
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
|
||||
|
||||
// If the status report request fails with a 403 or a 404, then this is a deliberate
|
||||
// message from the endpoint that the SARIF upload can be expected to fail too,
|
||||
// so the action should fail to avoid wasting actions minutes.
|
||||
//
|
||||
// Other failure responses (or lack thereof) could be transitory and should not
|
||||
// cause the action to fail.
|
||||
if (statusCode === 403) {
|
||||
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
|
||||
return false;
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Report that an action has failed.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
export async function reportActionFailed(action: string, cause?: string, exception?: string) {
|
||||
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
|
||||
}
|
||||
|
||||
/**
|
||||
* Report that an action has succeeded.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
export async function reportActionSucceeded(action: string) {
|
||||
await sendStatusReport(await createStatusReport(action, 'success'));
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue