Skip validating SARIF produced by CodeQL
parent f681ad69a7
commit 2f70a988e7
9 changed files with 97 additions and 63 deletions
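In outline, upload-lib now splits parsing from validation: readSarifFile parses a SARIF file and throws InvalidSarifUploadError on a JSON syntax error, while validateSarifFileSchema takes the already parsed SarifFile plus its path and skips JSON schema validation when every run in the file was produced by CodeQL (except in testing environments, where validation still runs). A minimal caller sketch mirroring the updated tests below; the file path is hypothetical:

import { readSarifFile, validateSarifFileSchema } from "./upload-lib";
import { getRunnerLogger } from "./logging";

const sarifPath = "out/results.sarif"; // hypothetical path, for illustration only
const sarif = readSarifFile(sarifPath); // parse only; throws InvalidSarifUploadError on bad JSON
// Schema validation is skipped when all runs come from CodeQL and this is not a testing environment.
validateSarifFileSchema(sarif, sarifPath, getRunnerLogger(true));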
lib/analyze.js (generated, 6 changes)

@@ -64,7 +64,6 @@ const logging_1 = require("./logging");
 const repository_1 = require("./repository");
 const tools_features_1 = require("./tools-features");
 const tracer_config_1 = require("./tracer-config");
-const upload_lib_1 = require("./upload-lib");
 const util = __importStar(require("./util"));
 const util_1 = require("./util");
 class CodeQLAnalysisError extends Error {
@@ -429,7 +428,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 logger.endGroup();
 logger.info(analysisSummary);
 if (await features.getValue(feature_flags_1.Feature.QaTelemetryEnabled)) {
-const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
+const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile);
 const perQueryAlertCountEventReport = {
 event: "codeql database interpret-results",
 started_at: startTimeInterpretResults.toISOString(),
@@ -457,8 +456,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
 return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", sarifRunPropertyFlag, automationDetailsId, config, features);
 }
 /** Get an object with all queries and their counts parsed from a SARIF file path. */
-function getPerQueryAlertCounts(sarifPath, log) {
-(0, upload_lib_1.validateSarifFileSchema)(sarifPath, log);
+function getPerQueryAlertCounts(sarifPath) {
 const sarifObject = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
 // We do not need to compute fingerprints because we are not sending data based off of locations.
 // Generate the query: alert count object
File diff suppressed because one or more lines are too long
lib/upload-lib.js (generated, 47 changes)

@@ -40,6 +40,7 @@ exports.InvalidSarifUploadError = void 0;
 exports.shouldShowCombineSarifFilesDeprecationWarning = shouldShowCombineSarifFilesDeprecationWarning;
 exports.populateRunAutomationDetails = populateRunAutomationDetails;
 exports.findSarifFilesInDir = findSarifFilesInDir;
+exports.readSarifFile = readSarifFile;
 exports.validateSarifFileSchema = validateSarifFileSchema;
 exports.buildPayload = buildPayload;
 exports.uploadFiles = uploadFiles;
@@ -324,17 +325,24 @@ function countResultsInSarif(sarif) {
 }
 return numResults;
 }
-// Validates that the given file path refers to a valid SARIF file.
-// Throws an error if the file is invalid.
-function validateSarifFileSchema(sarifFilePath, logger) {
-logger.info(`Validating ${sarifFilePath}`);
-let sarif;
+function readSarifFile(sarifFilePath) {
 try {
-sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
+return JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
 }
 catch (e) {
 throw new InvalidSarifUploadError(`Invalid SARIF. JSON syntax error: ${(0, util_1.getErrorMessage)(e)}`);
 }
+}
+// Validates that the given file path refers to a valid SARIF file.
+// Throws an error if the file is invalid.
+function validateSarifFileSchema(sarif, sarifFilePath, logger) {
+if (areAllRunsProducedByCodeQL([sarif]) &&
+// We want to validate CodeQL SARIF in testing environments.
+!util.getTestingEnvironment()) {
+logger.debug(`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`);
+return;
+}
+logger.info(`Validating ${sarifFilePath}`);
 // eslint-disable-next-line @typescript-eslint/no-require-imports
 const schema = require("../src/sarif-schema-2.1.0.json");
 const result = new jsonschema.Validator().validate(sarif, schema);
@@ -402,27 +410,28 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
 return payloadObj;
 }
 /**
-* Uploads a single SARIF file or a directory of SARIF files depending on what `sarifPath` refers
+* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
 * to.
 */
-async function uploadFiles(sarifPath, checkoutPath, category, features, logger) {
-const sarifFiles = getSarifFilePaths(sarifPath);
+async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger) {
+const sarifPaths = getSarifFilePaths(inputSarifPath);
 logger.startGroup("Uploading results");
-logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
+logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
 const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
-try {
+let sarif;
+if (sarifPaths.length > 1) {
 // Validate that the files we were asked to upload are all valid SARIF files
-for (const file of sarifFiles) {
-validateSarifFileSchema(file, logger);
+for (const sarifPath of sarifPaths) {
+const parsedSarif = readSarifFile(sarifPath);
+validateSarifFileSchema(parsedSarif, sarifPath, logger);
 }
+sarif = await combineSarifFilesUsingCLI(sarifPaths, gitHubVersion, features, logger);
 }
-catch (e) {
-if (e instanceof SyntaxError) {
-throw new InvalidSarifUploadError(e.message);
-}
-throw e;
+else {
+const sarifPath = sarifPaths[0];
+sarif = readSarifFile(sarifPath);
+validateSarifFileSchema(sarif, sarifPath, logger);
 }
-let sarif = await combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger);
 sarif = filterAlertsByDiffRange(logger, sarif);
 sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
 const analysisKey = await api.getAnalysisKey();
File diff suppressed because one or more lines are too long
lib/upload-lib.test.js (generated, 6 changes)

@@ -49,11 +49,11 @@ ava_1.default.beforeEach(() => {
 });
 (0, ava_1.default)("validateSarifFileSchema - valid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
-t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, (0, logging_1.getRunnerLogger)(true)));
+t.notThrows(() => uploadLib.validateSarifFileSchema(uploadLib.readSarifFile(inputFile), inputFile, (0, logging_1.getRunnerLogger)(true)));
 });
 (0, ava_1.default)("validateSarifFileSchema - invalid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
-t.throws(() => uploadLib.validateSarifFileSchema(inputFile, (0, logging_1.getRunnerLogger)(true)));
+t.throws(() => uploadLib.validateSarifFileSchema(uploadLib.readSarifFile(inputFile), inputFile, (0, logging_1.getRunnerLogger)(true)));
 });
 (0, ava_1.default)("validate correct payload used for push, PR merge commit, and PR head", async (t) => {
 process.env["GITHUB_EVENT_NAME"] = "push";
@@ -202,7 +202,7 @@ ava_1.default.beforeEach(() => {
 },
 };
 const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
-uploadLib.validateSarifFileSchema(sarifFile, mockLogger);
+uploadLib.validateSarifFileSchema(uploadLib.readSarifFile(sarifFile), sarifFile, mockLogger);
 t.deepEqual(loggedMessages.length, 3);
 t.deepEqual(loggedMessages[1], "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].tool.driver.rules[0].helpUri'.", "Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'.");
 });
File diff suppressed because one or more lines are too long
src/analyze.ts

@@ -26,7 +26,6 @@ import { getRepositoryNwoFromEnv } from "./repository";
 import { DatabaseCreationTimings, EventReport } from "./status-report";
 import { ToolsFeature } from "./tools-features";
 import { endTracingForCluster } from "./tracer-config";
-import { validateSarifFileSchema } from "./upload-lib";
 import * as util from "./util";
 import { BuildMode } from "./util";

@@ -630,7 +629,7 @@ export async function runQueries(
 logger.info(analysisSummary);

 if (await features.getValue(Feature.QaTelemetryEnabled)) {
-const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
+const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile);

 const perQueryAlertCountEventReport: EventReport = {
 event: "codeql database interpret-results",
@@ -682,11 +681,7 @@ export async function runQueries(
 }

 /** Get an object with all queries and their counts parsed from a SARIF file path. */
-function getPerQueryAlertCounts(
-sarifPath: string,
-log: Logger,
-): Record<string, number> {
-validateSarifFileSchema(sarifPath, log);
+function getPerQueryAlertCounts(sarifPath: string): Record<string, number> {
 const sarifObject = JSON.parse(
 fs.readFileSync(sarifPath, "utf8"),
 ) as util.SarifFile;
src/upload-lib.test.ts

@@ -17,14 +17,22 @@ test.beforeEach(() => {
 test("validateSarifFileSchema - valid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
 t.notThrows(() =>
-uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true)),
+uploadLib.validateSarifFileSchema(
+uploadLib.readSarifFile(inputFile),
+inputFile,
+getRunnerLogger(true),
+),
 );
 });

 test("validateSarifFileSchema - invalid", (t) => {
 const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
 t.throws(() =>
-uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true)),
+uploadLib.validateSarifFileSchema(
+uploadLib.readSarifFile(inputFile),
+inputFile,
+getRunnerLogger(true),
+),
 );
 });

@@ -314,7 +322,11 @@ test("accept results with invalid artifactLocation.uri value", (t) => {
 } as Logger;

 const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
-uploadLib.validateSarifFileSchema(sarifFile, mockLogger);
+uploadLib.validateSarifFileSchema(
+uploadLib.readSarifFile(sarifFile),
+sarifFile,
+mockLogger,
+);

 t.deepEqual(loggedMessages.length, 3);
 t.deepEqual(
src/upload-lib.ts

@@ -434,18 +434,35 @@ function countResultsInSarif(sarif: string): number {
 return numResults;
 }

-// Validates that the given file path refers to a valid SARIF file.
-// Throws an error if the file is invalid.
-export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
-logger.info(`Validating ${sarifFilePath}`);
-let sarif;
+export function readSarifFile(sarifFilePath: string): SarifFile {
 try {
-sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;
+return JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;
 } catch (e) {
 throw new InvalidSarifUploadError(
 `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`,
 );
 }
+}
+
+// Validates that the given file path refers to a valid SARIF file.
+// Throws an error if the file is invalid.
+export function validateSarifFileSchema(
+sarif: SarifFile,
+sarifFilePath: string,
+logger: Logger,
+) {
+if (
+areAllRunsProducedByCodeQL([sarif]) &&
+// We want to validate CodeQL SARIF in testing environments.
+!util.getTestingEnvironment()
+) {
+logger.debug(
+`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`,
+);
+return;
+}
+
+logger.info(`Validating ${sarifFilePath}`);
 // eslint-disable-next-line @typescript-eslint/no-require-imports
 const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema;

@@ -551,41 +568,44 @@ export function buildPayload(
 }

 /**
-* Uploads a single SARIF file or a directory of SARIF files depending on what `sarifPath` refers
+* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
 * to.
 */
 export async function uploadFiles(
-sarifPath: string,
+inputSarifPath: string,
 checkoutPath: string,
 category: string | undefined,
 features: FeatureEnablement,
 logger: Logger,
 ): Promise<UploadResult> {
-const sarifFiles = getSarifFilePaths(sarifPath);
+const sarifPaths = getSarifFilePaths(inputSarifPath);

 logger.startGroup("Uploading results");
-logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
+logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);

 const gitHubVersion = await getGitHubVersion();

-try {
+let sarif: SarifFile;
+
+if (sarifPaths.length > 1) {
 // Validate that the files we were asked to upload are all valid SARIF files
-for (const file of sarifFiles) {
-validateSarifFileSchema(file, logger);
+for (const sarifPath of sarifPaths) {
+const parsedSarif = readSarifFile(sarifPath);
+validateSarifFileSchema(parsedSarif, sarifPath, logger);
 }
-} catch (e) {
-if (e instanceof SyntaxError) {
-throw new InvalidSarifUploadError(e.message);
-}
-throw e;
+
+sarif = await combineSarifFilesUsingCLI(
+sarifPaths,
+gitHubVersion,
+features,
+logger,
+);
+} else {
+const sarifPath = sarifPaths[0];
+sarif = readSarifFile(sarifPath);
+validateSarifFileSchema(sarif, sarifPath, logger);
 }

-let sarif = await combineSarifFilesUsingCLI(
-sarifFiles,
-gitHubVersion,
-features,
-logger,
-);
 sarif = filterAlertsByDiffRange(logger, sarif);
 sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
