Merge pull request #1210 from github/edoardo/record-db-creation-time

Telemetry: Record DB creation time
This commit is contained in:
Edoardo Pirovano 2022-08-24 15:14:54 +01:00 committed by GitHub
commit 92c650bfbd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 63 additions and 16 deletions

File diff suppressed because one or more lines are too long

16
lib/analyze-action.js generated
View file

@@ -37,7 +37,7 @@ const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
// eslint-disable-next-line import/no-commonjs // eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json"); const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, didUploadTrapCaches, logger) { async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language); const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack); const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = { const statusReport = {
@@ -48,6 +48,7 @@ async function sendStatusReport(startedAt, config, stats, error, trapCacheUpload
} }
: {}), : {}),
...(stats || {}), ...(stats || {}),
...(dbCreationTimings || {}),
}; };
if (config && didUploadTrapCaches) { if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport = { const trapCacheUploadStatusReport = {
@@ -73,6 +74,7 @@ async function run() {
let runStats = undefined; let runStats = undefined;
let config = undefined; let config = undefined;
let trapCacheUploadTime = undefined; let trapCacheUploadTime = undefined;
let dbCreationTimings = undefined;
let didUploadTrapCaches = false; let didUploadTrapCaches = false;
util.initializeEnvironment(util.Mode.actions, pkg.version); util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version); await util.checkActionVersion(pkg.version);
@@ -100,7 +102,7 @@ async function run() {
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")); const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)(); const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger); const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags); dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") { if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger); runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
} }
@@ -148,10 +150,10 @@ async function run() {
console.log(error); console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) { if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport }; const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, didUploadTrapCaches, logger); await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
} }
else { else {
await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, didUploadTrapCaches, logger); await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
} }
return; return;
} }
@@ -159,13 +161,13 @@ async function run() {
await sendStatusReport(startedAt, config, { await sendStatusReport(startedAt, config, {
...runStats, ...runStats,
...uploadResult.statusReport, ...uploadResult.statusReport,
}, undefined, trapCacheUploadTime, didUploadTrapCaches, logger); }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
} }
else if (runStats) { else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, didUploadTrapCaches, logger); await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
} }
else { else {
await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, didUploadTrapCaches, logger); await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
} }
} }
exports.runPromise = run(); exports.runPromise = run();

File diff suppressed because one or more lines are too long

12
lib/analyze.js generated
View file

@@ -25,6 +25,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.validateQueryFilters = exports.runCleanup = exports.runFinalize = exports.createQuerySuiteContents = exports.convertPackToQuerySuiteEntry = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0; exports.validateQueryFilters = exports.runCleanup = exports.runFinalize = exports.createQuerySuiteContents = exports.convertPackToQuerySuiteEntry = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks"); // We need to import `performance` on Node 12
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner")); const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const del_1 = __importDefault(require("del")); const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml")); const yaml = __importStar(require("js-yaml"));
@@ -100,7 +101,10 @@ function dbIsFinalized(config, language, logger) {
exports.dbIsFinalized = dbIsFinalized; exports.dbIsFinalized = dbIsFinalized;
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) { async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd); const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const extractionStart = perf_hooks_1.performance.now();
await createdDBForScannedLanguages(codeql, config, logger, featureFlags); await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
const extractionTime = perf_hooks_1.performance.now() - extractionStart;
const trapImportStart = perf_hooks_1.performance.now();
for (const language of config.languages) { for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) { if (dbIsFinalized(config, language, logger)) {
logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`); logger.info(`There is already a finalized database for ${language} at the location where the CodeQL Action places databases, so we did not create one.`);
@@ -111,6 +115,11 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger,
logger.endGroup(); logger.endGroup();
} }
} }
const trapImportTime = perf_hooks_1.performance.now() - trapImportStart;
return {
scanned_language_extraction_duration_ms: Math.round(extractionTime),
trap_import_duration_ms: Math.round(trapImportTime),
};
} }
// Runs queries and creates sarif files in the given folder // Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) { async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
@@ -294,7 +303,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, f
} }
} }
await fs.promises.mkdir(outputDir, { recursive: true }); await fs.promises.mkdir(outputDir, { recursive: true });
await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags); const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd); const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
// WARNING: This does not _really_ end tracing, as the tracer will restore its // WARNING: This does not _really_ end tracing, as the tracer will restore its
// critical environment variables and it'll still be active for all processes // critical environment variables and it'll still be active for all processes
@@ -309,6 +318,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, f
// Delete the tracer config env var to avoid tracing ourselves // Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION]; delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
} }
return timings;
} }
exports.runFinalize = runFinalize; exports.runFinalize = runFinalize;
async function runCleanup(config, cleanupLevel, logger) { async function runCleanup(config, cleanupLevel, logger) {

File diff suppressed because one or more lines are too long

View file

@@ -623,6 +623,11 @@ export interface StatusReportBase {
codeql_version?: string; codeql_version?: string;
} }
export interface DatabaseCreationTimings {
scanned_language_extraction_duration_ms?: number;
trap_import_duration_ms?: number;
}
export function getActionsStatus( export function getActionsStatus(
error?: unknown, error?: unknown,
otherFailureCause?: string otherFailureCause?: string

View file

@@ -4,6 +4,7 @@ import { performance } from "perf_hooks";
import * as core from "@actions/core"; import * as core from "@actions/core";
import * as actionsUtil from "./actions-util"; import * as actionsUtil from "./actions-util";
import { DatabaseCreationTimings } from "./actions-util";
import { import {
CodeQLAnalysisError, CodeQLAnalysisError,
QueriesStatusReport, QueriesStatusReport,
@@ -32,6 +33,7 @@ interface AnalysisStatusReport
interface FinishStatusReport interface FinishStatusReport
extends actionsUtil.StatusReportBase, extends actionsUtil.StatusReportBase,
actionsUtil.DatabaseCreationTimings,
AnalysisStatusReport {} AnalysisStatusReport {}
interface FinishWithTrapUploadStatusReport extends FinishStatusReport { interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
@@ -47,6 +49,7 @@ export async function sendStatusReport(
stats: AnalysisStatusReport | undefined, stats: AnalysisStatusReport | undefined,
error: Error | undefined, error: Error | undefined,
trapCacheUploadTime: number | undefined, trapCacheUploadTime: number | undefined,
dbCreationTimings: DatabaseCreationTimings | undefined,
didUploadTrapCaches: boolean, didUploadTrapCaches: boolean,
logger: Logger logger: Logger
) { ) {
@@ -70,6 +73,7 @@ export async function sendStatusReport(
} }
: {}), : {}),
...(stats || {}), ...(stats || {}),
...(dbCreationTimings || {}),
}; };
if (config && didUploadTrapCaches) { if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = { const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
@@ -99,6 +103,7 @@ async function run() {
let runStats: QueriesStatusReport | undefined = undefined; let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined; let config: Config | undefined = undefined;
let trapCacheUploadTime: number | undefined = undefined; let trapCacheUploadTime: number | undefined = undefined;
let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
let didUploadTrapCaches = false; let didUploadTrapCaches = false;
util.initializeEnvironment(util.Mode.actions, pkg.version); util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version); await util.checkActionVersion(pkg.version);
@@ -161,7 +166,14 @@ async function run() {
logger logger
); );
await runFinalize(outputDir, threads, memory, config, logger, featureFlags); dbCreationTimings = await runFinalize(
outputDir,
threads,
memory,
config,
logger,
featureFlags
);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") { if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await runQueries( runStats = await runQueries(
outputDir, outputDir,
@@ -249,6 +261,7 @@ async function run() {
stats, stats,
error, error,
trapCacheUploadTime, trapCacheUploadTime,
dbCreationTimings,
didUploadTrapCaches, didUploadTrapCaches,
logger logger
); );
@@ -259,6 +272,7 @@ async function run() {
undefined, undefined,
error, error,
trapCacheUploadTime, trapCacheUploadTime,
dbCreationTimings,
didUploadTrapCaches, didUploadTrapCaches,
logger logger
); );
@@ -277,6 +291,7 @@ async function run() {
}, },
undefined, undefined,
trapCacheUploadTime, trapCacheUploadTime,
dbCreationTimings,
didUploadTrapCaches, didUploadTrapCaches,
logger logger
); );
@@ -287,6 +302,7 @@ async function run() {
{ ...runStats }, { ...runStats },
undefined, undefined,
trapCacheUploadTime, trapCacheUploadTime,
dbCreationTimings,
didUploadTrapCaches, didUploadTrapCaches,
logger logger
); );
@@ -297,6 +313,7 @@ async function run() {
undefined, undefined,
undefined, undefined,
trapCacheUploadTime, trapCacheUploadTime,
dbCreationTimings,
didUploadTrapCaches, didUploadTrapCaches,
logger logger
); );

View file

@@ -1,10 +1,12 @@
import * as fs from "fs"; import * as fs from "fs";
import * as path from "path"; import * as path from "path";
import { performance } from "perf_hooks"; // We need to import `performance` on Node 12
import * as toolrunner from "@actions/exec/lib/toolrunner"; import * as toolrunner from "@actions/exec/lib/toolrunner";
import del from "del"; import del from "del";
import * as yaml from "js-yaml"; import * as yaml from "js-yaml";
import { DatabaseCreationTimings } from "./actions-util";
import * as analysisPaths from "./analysis-paths"; import * as analysisPaths from "./analysis-paths";
import { import {
CodeQL, CodeQL,
@@ -168,10 +170,14 @@ async function finalizeDatabaseCreation(
memoryFlag: string, memoryFlag: string,
logger: Logger, logger: Logger,
featureFlags: FeatureFlags featureFlags: FeatureFlags
) { ): Promise<DatabaseCreationTimings> {
const codeql = await getCodeQL(config.codeQLCmd); const codeql = await getCodeQL(config.codeQLCmd);
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
const extractionStart = performance.now();
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
const extractionTime = performance.now() - extractionStart;
const trapImportStart = performance.now();
for (const language of config.languages) { for (const language of config.languages) {
if (dbIsFinalized(config, language, logger)) { if (dbIsFinalized(config, language, logger)) {
logger.info( logger.info(
@@ -187,6 +193,12 @@ async function finalizeDatabaseCreation(
logger.endGroup(); logger.endGroup();
} }
} }
const trapImportTime = performance.now() - trapImportStart;
return {
scanned_language_extraction_duration_ms: Math.round(extractionTime),
trap_import_duration_ms: Math.round(trapImportTime),
};
} }
// Runs queries and creates sarif files in the given folder // Runs queries and creates sarif files in the given folder
@@ -496,7 +508,7 @@ export async function runFinalize(
config: configUtils.Config, config: configUtils.Config,
logger: Logger, logger: Logger,
featureFlags: FeatureFlags featureFlags: FeatureFlags
) { ): Promise<DatabaseCreationTimings> {
try { try {
await del(outputDir, { force: true }); await del(outputDir, { force: true });
} catch (error: any) { } catch (error: any) {
@@ -506,7 +518,7 @@ export async function runFinalize(
} }
await fs.promises.mkdir(outputDir, { recursive: true }); await fs.promises.mkdir(outputDir, { recursive: true });
await finalizeDatabaseCreation( const timings = await finalizeDatabaseCreation(
config, config,
threadsFlag, threadsFlag,
memoryFlag, memoryFlag,
@@ -527,6 +539,7 @@ export async function runFinalize(
// Delete the tracer config env var to avoid tracing ourselves // Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION]; delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
} }
return timings;
} }
export async function runCleanup( export async function runCleanup(