Send new per-query alert count event reports for QA telemetry (#1741)

Author: Angela P Wen, 2023-06-30 07:53:13 -07:00 (committed by GitHub)
parent cff3d9e3c9
commit 46a6823b81
19 changed files with 213 additions and 51 deletions


lib/analyze-action.js (generated): 6 changed lines

@@ -44,7 +44,7 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const shared_environment_1 = require("./shared-environment");
const trap_caching_1 = require("./trap-caching");
const upload_lib = __importStar(require("./upload-lib"));
const uploadLib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
@@ -177,7 +177,7 @@ async function run() {
core.setOutput("db-locations", dbLocations);
const uploadInput = actionsUtil.getOptionalInput("upload");
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
uploadResult = await upload_lib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
uploadResult = await uploadLib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
core.setOutput("sarif-id", uploadResult.sarifID);
}
else {
@@ -196,7 +196,7 @@ async function run() {
}
else if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
await uploadLib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
}
// If we did not throw an error yet here, but we expect one, throw it.
if (actionsUtil.getOptionalInput("expect-error") === "true") {


lib/analyze.js (generated): 51 changed lines

@@ -39,6 +39,7 @@ const configUtils = __importStar(require("./config-utils"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const tracer_config_1 = require("./tracer-config");
const upload_lib_1 = require("./upload-lib");
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
@@ -138,6 +139,9 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
const packsWithVersion = config.packs[language] || [];
try {
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
let startTimeInterpretResults;
let endTimeInterpretResults;
if (await util.useCodeScanningConfigInCli(codeql, features)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
@@ -152,11 +156,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(language, undefined, sarifFile, config.debugMode);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
}
@@ -201,14 +205,29 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
logger.endGroup();
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(language, querySuitePaths, sarifFile, config.debugMode);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
}
if (await features.getValue(feature_flags_1.Feature.QaTelemetryEnabled)) {
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
const perQueryAlertCountEventReport = {
event: "codeql database interpret-results",
started_at: startTimeInterpretResults.toString(),
completed_at: endTimeInterpretResults.toString(),
exit_status: "success",
language,
properties: perQueryAlertCounts,
};
if (statusReport["event_reports"] === undefined) {
statusReport["event_reports"] = [];
}
statusReport["event_reports"].push(perQueryAlertCountEventReport);
}
await runPrintLinesOfCode(language);
}
catch (e) {
@@ -225,6 +244,26 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, config, features, logger);
}
/** Get an object with all queries and their counts parsed from a SARIF file path. */
function getPerQueryAlertCounts(sarifPath, log) {
(0, upload_lib_1.validateSarifFileSchema)(sarifPath, log);
const sarifObject = JSON.parse(fs.readFileSync(sarifPath, "utf8"));
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object
const perQueryAlertCounts = {};
// All rules (queries), from all results, from all runs
for (const sarifRun of sarifObject.runs) {
if (sarifRun.results) {
for (const result of sarifRun.results) {
const query = result.rule?.id || result.ruleId;
if (query) {
perQueryAlertCounts[query] = (perQueryAlertCounts[query] || 0) + 1;
}
}
}
}
return perQueryAlertCounts;
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databasePrintBaseline(databasePath);

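To make the new QA telemetry branch concrete, here is a minimal sketch of the counting scheme used by getPerQueryAlertCounts, applied to a toy SARIF object. The SARIF content and rule IDs below are made up for illustration, and schema validation is omitted; the point is only the shape of the result, a rule-ID-to-count record that ends up in the event report's properties field.

// Hedged sketch: the toy SARIF below is hypothetical; the loop mirrors the
// counting scheme of getPerQueryAlertCounts (schema validation omitted).
interface ToyResult {
  ruleId?: string;
  rule?: { id?: string };
}

const toySarif: { runs: Array<{ results?: ToyResult[] }> } = {
  runs: [
    { results: [{ ruleId: "js/sql-injection" }, { ruleId: "js/xss" }] },
    { results: [{ rule: { id: "js/sql-injection" } }] },
  ],
};

const counts: Record<string, number> = {};
for (const sarifRun of toySarif.runs) {
  for (const result of sarifRun.results ?? []) {
    const query = result.rule?.id || result.ruleId;
    if (query) {
      counts[query] = (counts[query] || 0) + 1;
    }
  }
}
console.log(counts); // { "js/sql-injection": 2, "js/xss": 1 }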

lib/analyze.test.js (generated): 37 changed lines

@@ -37,11 +37,16 @@ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom. Also checks the correct search
// paths are set in the database analyze invocation.
/** Checks that the duration fields are populated for the correct language
* and correct case of builtin or custom. Also checks the correct search
* paths are set in the database analyze invocation.
*
* Mocks the QA telemetry feature flag and checks the appropriate status report
* fields.
*/
(0, ava_1.default)("status report fields and search path setting", async (t) => {
let searchPathsUsed = [];
return await util.withTmpDir(async (tmpDir) => {
@@ -53,6 +58,7 @@ const util = __importStar(require("./util"));
[languages_1.Language.cpp]: ["a/b@1.0.0"],
[languages_1.Language.java]: ["c/d@2.0.0"],
};
sinon.stub(uploadLib, "validateSarifFileSchema");
for (const language of Object.values(languages_1.Language)) {
(0, codeql_1.setCodeQL)({
packDownload: async () => ({ packs: [] }),
@@ -130,18 +136,25 @@ const util = __importStar(require("./util"));
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) {
t.deepEqual(statusReportKeys.length, 3, statusReportKeys.toString());
t.deepEqual(statusReportKeys.length, 4, statusReportKeys.toString());
t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
t.deepEqual(statusReportKeys[1], `analyze_custom_queries_${language}_duration_ms`);
t.deepEqual(statusReportKeys[2], `interpret_results_${language}_duration_ms`);
t.deepEqual(statusReportKeys[2], "event_reports");
t.deepEqual(statusReportKeys[3], `interpret_results_${language}_duration_ms`);
}
else {
t.deepEqual(statusReportKeys[0], `analyze_builtin_queries_${language}_duration_ms`);
t.deepEqual(statusReportKeys[1], `interpret_results_${language}_duration_ms`);
t.deepEqual(statusReportKeys[1], "event_reports");
t.deepEqual(statusReportKeys[2], `interpret_results_${language}_duration_ms`);
}
if (builtinStatusReport.event_reports) {
for (const eventReport of builtinStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
config.queries[language] = {
builtin: [],
@@ -156,14 +169,20 @@ const util = __importStar(require("./util"));
},
],
};
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
t.deepEqual(Object.keys(customStatusReport).length, 2);
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
t.deepEqual(Object.keys(customStatusReport).length, 3);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
const expectedSearchPathsUsed = hasPacks
? [undefined, undefined, "/1", "/2", undefined]
: [undefined, "/1", "/2"];
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
t.true("event_reports" in customStatusReport);
if (customStatusReport.event_reports) {
for (const eventReport of customStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
}
verifyQuerySuites(tmpDir);
});


lib/feature-flags.js (generated): 6 changed lines

@@ -40,6 +40,7 @@ var Feature;
Feature["ExportCodeScanningConfigEnabled"] = "export_code_scanning_config_enabled";
Feature["ExportDiagnosticsEnabled"] = "export_diagnostics_enabled";
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
})(Feature = exports.Feature || (exports.Feature = {}));
exports.featureConfig = {
@@ -68,6 +69,11 @@ exports.featureConfig = {
minimumVersion: "2.7.5",
defaultValue: false,
},
[Feature.QaTelemetryEnabled]: {
envVar: "CODEQL_ACTION_QA_TELEMETRY",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",

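The new QaTelemetryEnabled entry follows the same pattern as the other features: an opt-in environment variable (CODEQL_ACTION_QA_TELEMETRY), no minimum CLI version, and a default of false. As a hedged illustration of how such an entry is typically consumed (the resolveFeature helper below is hypothetical, not the repository's actual Features implementation): the environment variable can force the flag on or off, and the remote value or defaultValue applies otherwise.

// Hypothetical sketch of resolving a feature entry to a boolean.
interface FeatureConfigEntry {
  envVar: string;
  minimumVersion: string | undefined;
  defaultValue: boolean;
}

function resolveFeature(
  entry: FeatureConfigEntry,
  apiValue: boolean | undefined // value reported remotely, if any
): boolean {
  const envValue = process.env[entry.envVar];
  if (envValue === "true") return true;   // e.g. CODEQL_ACTION_QA_TELEMETRY=true forces the flag on
  if (envValue === "false") return false; // explicit opt-out
  return apiValue ?? entry.defaultValue;  // otherwise fall back to the remote value or the default
}

Under that reading, the per-query alert count reports stay off by default and can be opted into for individual runs.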

lib/upload-lib.js (generated): 4 changed lines

@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.pruneInvalidResults = exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.pruneInvalidResults = exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const process_1 = require("process");
@@ -62,7 +62,6 @@ function combineSarifFiles(sarifFiles) {
}
return combinedSarif;
}
exports.combineSarifFiles = combineSarifFiles;
// Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents.
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
@@ -174,7 +173,6 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {


src/actions-util.ts

@@ -315,6 +315,25 @@ export type ActionStatus =
| "failure"
| "user-error";
// Any status report may include an array of EventReports associated with it.
export interface EventReport {
/** An enumerable description of the event. */
event: string;
/** Time this event started. */
started_at: string;
/** Time this event ended. */
completed_at: string;
/** eg: `success`, `failure`, `timeout`, etc. */
exit_status?: string;
/** If the event is language-specific. */
language?: string;
/**
* A generic JSON blob of data related to this event.
* Use Object.assign() to append additional fields to the object.
*/
properties?: object;
}
export interface StatusReportBase {
/**
* UUID representing the job run that this status report belongs to. We

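For orientation, here is a hedged example of the report that the QA telemetry path in analyze builds against this interface; the rule IDs and counts are hypothetical, and the example assumes the EventReport interface above is in scope. Since the code produces started_at and completed_at with new Date().getTime().toString(), they are epoch-millisecond strings rather than ISO timestamps.

// Hedged example only: the values below are made up.
const exampleEventReport: EventReport = {
  event: "codeql database interpret-results",
  started_at: "1688140393000",   // epoch milliseconds, as a string
  completed_at: "1688140405000",
  exit_status: "success",
  language: "javascript",
  // properties carries the per-query alert counts, keyed by rule ID
  properties: {
    "js/sql-injection": 2,
    "js/xss": 1,
  },
};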
src/analyze-action.ts

@@ -28,13 +28,13 @@ import {
CODEQL_ACTION_DID_AUTOBUILD_GOLANG,
} from "./shared-environment";
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
import * as upload_lib from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import * as util from "./util";
import { checkForTimeout, wrapError } from "./util";
interface AnalysisStatusReport
extends upload_lib.UploadStatusReport,
extends uploadLib.UploadStatusReport,
QueriesStatusReport {}
interface FinishStatusReport
@@ -269,7 +269,7 @@ async function run() {
core.setOutput("db-locations", dbLocations);
const uploadInput = actionsUtil.getOptionalInput("upload");
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
uploadResult = await upload_lib.uploadFromActions(
uploadResult = await uploadLib.uploadFromActions(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
@@ -296,7 +296,7 @@ async function run() {
uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await upload_lib.waitForProcessing(
await uploadLib.waitForProcessing(
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
getActionsLogger()

src/analyze.test.ts

@@ -18,13 +18,18 @@ import { Feature } from "./feature-flags";
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { setupTests, setupActionsVars, createFeatures } from "./testing-utils";
import * as uploadLib from "./upload-lib";
import * as util from "./util";
setupTests(test);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom. Also checks the correct search
// paths are set in the database analyze invocation.
/** Checks that the duration fields are populated for the correct language
* and correct case of builtin or custom. Also checks the correct search
* paths are set in the database analyze invocation.
*
* Mocks the QA telemetry feature flag and checks the appropriate status report
* fields.
*/
test("status report fields and search path setting", async (t) => {
let searchPathsUsed: Array<string | undefined> = [];
return await util.withTmpDir(async (tmpDir) => {
@@ -38,6 +43,8 @@ test("status report fields and search path setting", async (t) => {
[Language.java]: ["c/d@2.0.0"],
};
sinon.stub(uploadLib, "validateSarifFileSchema");
for (const language of Object.values(Language)) {
setCodeQL({
packDownload: async () => ({ packs: [] }),
@@ -135,12 +142,12 @@ test("status report fields and search path setting", async (t) => {
undefined,
config,
getRunnerLogger(true),
createFeatures([])
createFeatures([Feature.QaTelemetryEnabled])
);
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) {
t.deepEqual(statusReportKeys.length, 3, statusReportKeys.toString());
t.deepEqual(statusReportKeys.length, 4, statusReportKeys.toString());
t.deepEqual(
statusReportKeys[0],
`analyze_builtin_queries_${language}_duration_ms`
@@ -149,8 +156,9 @@ test("status report fields and search path setting", async (t) => {
statusReportKeys[1],
`analyze_custom_queries_${language}_duration_ms`
);
t.deepEqual(statusReportKeys[2], "event_reports");
t.deepEqual(
statusReportKeys[2],
statusReportKeys[3],
`interpret_results_${language}_duration_ms`
);
} else {
@@ -158,11 +166,17 @@ test("status report fields and search path setting", async (t) => {
statusReportKeys[0],
`analyze_builtin_queries_${language}_duration_ms`
);
t.deepEqual(statusReportKeys[1], "event_reports");
t.deepEqual(
statusReportKeys[1],
statusReportKeys[2],
`interpret_results_${language}_duration_ms`
);
}
if (builtinStatusReport.event_reports) {
for (const eventReport of builtinStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
config.queries[language] = {
builtin: [],
@@ -185,9 +199,9 @@ test("status report fields and search path setting", async (t) => {
undefined,
config,
getRunnerLogger(true),
createFeatures([])
createFeatures([Feature.QaTelemetryEnabled])
);
t.deepEqual(Object.keys(customStatusReport).length, 2);
t.deepEqual(Object.keys(customStatusReport).length, 3);
t.true(
`analyze_custom_queries_${language}_duration_ms` in customStatusReport
);
@@ -196,6 +210,12 @@ test("status report fields and search path setting", async (t) => {
: [undefined, "/1", "/2"];
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
t.true("event_reports" in customStatusReport);
if (customStatusReport.event_reports) {
for (const eventReport of customStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
}
verifyQuerySuites(tmpDir);

src/analyze.ts

@@ -6,7 +6,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import del from "del";
import * as yaml from "js-yaml";
import { DatabaseCreationTimings } from "./actions-util";
import { DatabaseCreationTimings, EventReport } from "./actions-util";
import * as analysisPaths from "./analysis-paths";
import { CodeQL, getCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
@@ -14,6 +14,7 @@ import { FeatureEnablement, Feature } from "./feature-flags";
import { isScannedLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { endTracingForCluster } from "./tracer-config";
import { validateSarifFileSchema } from "./upload-lib";
import * as util from "./util";
export class CodeQLAnalysisError extends Error {
@@ -78,6 +79,8 @@ export interface QueriesStatusReport {
interpret_results_swift_duration_ms?: number;
/** Name of language that errored during analysis (or undefined if no language failed). */
analyze_failure_language?: string;
/** Reports on discrete events associated with this status report. */
event_reports?: EventReport[];
}
async function setupPythonExtractor(
@@ -242,6 +245,9 @@ export async function runQueries(
const packsWithVersion = config.packs[language] || [];
try {
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
let startTimeInterpretResults: number;
let endTimeInterpretResults: number;
if (await util.useCodeScanningConfigInCli(codeql, features)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
@@ -257,16 +263,16 @@
new Date().getTime() - startTimeBuiltIn;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(
language,
undefined,
sarifFile,
config.debugMode
);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
} else {
@@ -342,19 +348,37 @@
}
logger.endGroup();
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(
language,
querySuitePaths,
sarifFile,
config.debugMode
);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
}
if (await features.getValue(Feature.QaTelemetryEnabled)) {
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
const perQueryAlertCountEventReport: EventReport = {
event: "codeql database interpret-results",
started_at: startTimeInterpretResults.toString(),
completed_at: endTimeInterpretResults.toString(),
exit_status: "success",
language,
properties: perQueryAlertCounts,
};
if (statusReport["event_reports"] === undefined) {
statusReport["event_reports"] = [];
}
statusReport["event_reports"].push(perQueryAlertCountEventReport);
}
await runPrintLinesOfCode(language);
} catch (e) {
logger.info(String(e));
@@ -392,6 +416,34 @@ export async function runQueries(
);
}
/** Get an object with all queries and their counts parsed from a SARIF file path. */
function getPerQueryAlertCounts(
sarifPath: string,
log: Logger
): Record<string, number> {
validateSarifFileSchema(sarifPath, log);
const sarifObject = JSON.parse(
fs.readFileSync(sarifPath, "utf8")
) as util.SarifFile;
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object
const perQueryAlertCounts: Record<string, number> = {};
// All rules (queries), from all results, from all runs
for (const sarifRun of sarifObject.runs) {
if (sarifRun.results) {
for (const result of sarifRun.results) {
const query = result.rule?.id || result.ruleId;
if (query) {
perQueryAlertCounts[query] = (perQueryAlertCounts[query] || 0) + 1;
}
}
}
}
return perQueryAlertCounts;
}
async function runPrintLinesOfCode(language: Language): Promise<string> {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databasePrintBaseline(databasePath);

src/feature-flags.ts

@@ -43,6 +43,7 @@ export enum Feature {
ExportCodeScanningConfigEnabled = "export_code_scanning_config_enabled",
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
QaTelemetryEnabled = "qa_telemetry_enabled",
UploadFailedSarifEnabled = "upload_failed_sarif_enabled",
}
@@ -76,6 +77,11 @@ export const featureConfig: Record<
minimumVersion: "2.7.5",
defaultValue: false,
},
[Feature.QaTelemetryEnabled]: {
envVar: "CODEQL_ACTION_QA_TELEMETRY",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",

src/upload-lib.ts

@@ -20,7 +20,7 @@ import * as workflow from "./workflow";
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): SarifFile {
function combineSarifFiles(sarifFiles: string[]): SarifFile {
const combinedSarif: SarifFile = {
version: null,
runs: [],
@@ -198,7 +198,7 @@ function getSarifFilePaths(sarifPath: string) {
}
// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
function countResultsInSarif(sarif: string): number {
let numResults = 0;
let parsedSarif;
try {
@@ -224,7 +224,7 @@ export function countResultsInSarif(sarif: string): number {
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;
const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema;
const result = new jsonschema.Validator().validate(sarif, schema);

src/util.ts

@@ -72,6 +72,9 @@ export interface SarifInvocation {
export interface SarifResult {
ruleId?: string;
rule?: {
id?: string;
};
message?: {
text?: string;
};
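
The new rule field reflects that SARIF lets a result reference its rule either through the top-level ruleId string or through a rule object carrying an id, which is why getPerQueryAlertCounts tries result.rule?.id before falling back to result.ruleId. A hedged sketch with made-up values, assuming the SarifResult interface above is in scope:

// Both shapes identify the same query; the counting code accepts either.
const viaRuleId: SarifResult = {
  ruleId: "js/xss",
  message: { text: "Possible cross-site scripting." },
};
const viaRuleObject: SarifResult = {
  rule: { id: "js/xss" },
  message: { text: "Possible cross-site scripting." },
};

const keyA = viaRuleId.rule?.id || viaRuleId.ruleId;         // "js/xss"
const keyB = viaRuleObject.rule?.id || viaRuleObject.ruleId; // "js/xss"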