Add an option to allow waiting until an analysis has been processed before finishing the Action.

Chris Gavin 2021-10-18 11:43:30 +01:00
parent 2ecc17d74f
commit 316ad9d919
8 changed files with 133 additions and 8 deletions
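A minimal usage sketch of the new input (illustrative only, not part of this commit; the action reference, version tag, and step layout assume a typical CodeQL workflow):

  - name: Perform CodeQL Analysis
    uses: github/codeql-action/analyze@v1
    with:
      wait-for-processing: true

When the input is omitted or set to anything other than "true", behaviour is unchanged, since the code below only waits when getOptionalInput("wait-for-processing") === "true".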

analyze/action.yml

@@ -52,6 +52,9 @@ inputs:
    description: Whether to upload the resulting CodeQL database
    required: false
    default: "true"
  wait-for-processing:
    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
    required: false
  token:
    default: ${{ github.token }}
  matrix:

lib/upload-lib.js generated (55 changes)

@@ -105,6 +105,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -129,14 +130,14 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), actionsUtil.getOptionalInput("wait-for-processing") === "true", gitHubVersion, apiDetails, logger);
}
exports.uploadFromActions = uploadFromActions;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, category, sourceRoot, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, category, undefined, undefined, sourceRoot, undefined, gitHubVersion, apiDetails, logger);
return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, category, undefined, undefined, sourceRoot, undefined, false, gitHubVersion, apiDetails, logger);
}
exports.uploadFromRunner = uploadFromRunner;
function getSarifFilePaths(sarifPath) {
@@ -238,9 +239,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
}
}
exports.buildPayload = buildPayload;
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, waitForProcessing, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
validateUniqueCategory(category);
@@ -263,8 +266,52 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
if (waitForProcessing) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
// We put the delay at the start of the loop, since it's unlikely that the analysis will have succeeded immediately. We might as well wait preemptively.
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Status is ${status}.`);
if (status === "complete") {
break;
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.info("Analysis is not found yet...");
break;
default:
throw e;
}
}
else {
throw e;
}
}
}
logger.endGroup();
}
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,

lib/upload-lib.js.map generated: File diff suppressed because one or more lines are too long

lib/util.js generated (6 changes)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -492,4 +492,8 @@ async function bundleDb(config, language, codeql) {
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
//# sourceMappingURL=util.js.map

lib/util.js.map generated: File diff suppressed because one or more lines are too long

src/upload-lib.ts

@@ -110,6 +110,8 @@ async function uploadPayload(
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
export interface UploadStatusReport {
@@ -159,6 +161,7 @@ export async function uploadFromActions(
actionsUtil.getWorkflowRunID(),
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getRequiredInput("matrix"),
actionsUtil.getOptionalInput("wait-for-processing") === "true",
gitHubVersion,
apiDetails,
logger
@@ -190,6 +193,7 @@ export async function uploadFromRunner(
undefined,
sourceRoot,
undefined,
false,
gitHubVersion,
apiDetails,
logger
@@ -323,6 +327,9 @@ export function buildPayload(
}
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(
@@ -336,6 +343,7 @@ async function uploadFiles(
workflowRunID: number | undefined,
sourceRoot: string,
environment: string | undefined,
waitForProcessing: boolean,
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
@@ -390,10 +398,66 @@ async function uploadFiles(
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(
payload,
repositoryNwo,
apiDetails,
logger
);
logger.endGroup();
if (waitForProcessing) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (
Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning(
"Timed out waiting for analysis to finish processing. Continuing."
);
break;
}
// We put the delay at the start of the loop, since it's unlikely that the analysis will have succeeded immediately. We might as well wait preemptively.
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
try {
const response = await client.request(
"GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
}
);
const status = response.data.processing_status;
logger.info(`Status is ${status}.`);
if (status === "complete") {
break;
}
} catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.info("Analysis is not found yet...");
break;
default:
throw e;
}
} else {
throw e;
}
}
}
logger.endGroup();
}
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,

src/util.ts

@@ -564,3 +564,7 @@ export async function bundleDb(
}
return databaseBundlePath;
}
export async function delay(milliseconds: number) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}

upload-sarif/action.yml

@@ -20,6 +20,9 @@ inputs:
  category:
    description: String used by Code Scanning for matching the analyses
    required: false
  wait-for-processing:
    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
    required: false
runs:
  using: 'node12'
  main: '../lib/upload-sarif-action.js'
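The same input applies to the standalone upload action changed above; a hedged sketch (the sarif_file path and version tag are assumptions, not part of this diff):

  - name: Upload SARIF
    uses: github/codeql-action/upload-sarif@v1
    with:
      sarif_file: results.sarif
      wait-for-processing: true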