Merge branch 'master' into analysisName
commit 52cd1f2261
15 changed files with 279 additions and 75 deletions
87  lib/upload-lib.js  (generated)
@@ -54,23 +54,77 @@ function combineSarifFiles(sarifFiles) {
     return JSON.stringify(combinedSarif);
 }
 exports.combineSarifFiles = combineSarifFiles;
+// Upload the given payload.
+// If the request fails then this will retry a small number of times.
+async function uploadPayload(payload) {
+    core.info('Uploading results');
+    const githubToken = core.getInput('token');
+    const ph = new auth.BearerCredentialHandler(githubToken);
+    const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
+    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
+    // Make up to 4 attempts to upload, and sleep for these
+    // number of seconds between each attempt.
+    // We don't want to backoff too much to avoid wasting action
+    // minutes, but just waiting a little bit could maybe help.
+    const backoffPeriods = [1, 5, 15];
+    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
+        const res = await client.put(url, payload);
+        core.debug('response status: ' + res.message.statusCode);
+        const statusCode = res.message.statusCode;
+        if (statusCode === 202) {
+            core.info("Successfully uploaded results");
+            return true;
+        }
+        const requestID = res.message.headers["x-github-request-id"];
+        // On any other status code that's not 5xx mark the upload as failed
+        if (!statusCode || statusCode < 500 || statusCode >= 600) {
+            core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
+            return false;
+        }
+        // On a 5xx status code we may retry the request
+        if (attempt < backoffPeriods.length) {
+            // Log the failure as a warning but don't mark the action as failed yet
+            core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
+                ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
+                ' seconds: (' + statusCode + ') ' + await res.readBody());
+            // Sleep for the backoff period
+            await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
+            continue;
+        }
+        else {
+            // If the upload fails with 5xx then we assume it is a temporary problem
+            // and not an error that the user has caused or can fix.
+            // We avoid marking the job as failed to avoid breaking CI workflows.
+            core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
+            return false;
+        }
+    }
+    return false;
+}
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
+// Returns true iff the upload occurred and succeeded
 async function upload(input) {
     if (fs.lstatSync(input).isDirectory()) {
         const sarifFiles = fs.readdirSync(input)
             .filter(f => f.endsWith(".sarif"))
             .map(f => path.resolve(input, f));
-        await uploadFiles(sarifFiles);
+        if (sarifFiles.length === 0) {
+            core.setFailed("No SARIF files found to upload in \"" + input + "\".");
+            return false;
+        }
+        return await uploadFiles(sarifFiles);
     }
     else {
-        await uploadFiles([input]);
+        return await uploadFiles([input]);
     }
 }
 exports.upload = upload;
 // Uploads the given set of sarif files.
+// Returns true iff the upload occurred and succeeded
 async function uploadFiles(sarifFiles) {
     core.startGroup("Uploading results");
+    let succeeded = false;
     try {
         // Check if an upload has happened before. If so then abort.
         // This is intended to catch when the finish and upload-sarif actions
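For reference, the retry policy added in this hunk reads cleanly in isolation: with backoffPeriods = [1, 5, 15], the loop makes up to four attempts (backoffPeriods.length + 1), treats a 202 as success, fails hard on any non-5xx response, and otherwise sleeps 1, 5, then 15 seconds before retrying. A minimal standalone sketch of that control flow follows; retryOn5xx and attemptUpload are hypothetical names standing in for the client.put call and are not code from this commit.

// Sketch only: same retry shape as uploadPayload above, with the HTTP call
// abstracted away. `attemptUpload` resolves to a status code.
async function retryOn5xx(attemptUpload, backoffPeriods = [1, 5, 15]) {
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const statusCode = await attemptUpload();
        if (statusCode === 202) {
            return true;                                  // accepted
        }
        if (!statusCode || statusCode < 500 || statusCode >= 600) {
            return false;                                 // non-retryable failure
        }
        if (attempt < backoffPeriods.length) {
            // 5xx: wait for the backoff period, then try again
            await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
        }
    }
    return false;                                         // retries exhausted
}

// Example: pretend the first two attempts hit a 503 and the third is accepted.
const responses = [503, 503, 202];
retryOn5xx(async () => responses.shift()).then(ok => console.log('uploaded:', ok));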
@@ -78,7 +132,7 @@ async function uploadFiles(sarifFiles) {
         const sentinelFile = await getSentinelFilePath();
         if (fs.existsSync(sentinelFile)) {
             core.info("Aborting as an upload has already happened from this job");
-            return;
+            return false;
         }
         const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
         const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
@@ -86,7 +140,7 @@ async function uploadFiles(sarifFiles) {
         const analysisKey = await util.getAnalysisKey();
         const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
         const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
-        core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
+        core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
         let sarifPayload = combineSarifFiles(sarifFiles);
         sarifPayload = fingerprints.addFingerprints(sarifPayload);
         const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
@@ -95,7 +149,7 @@ async function uploadFiles(sarifFiles) {
         const workflowRunID = parseInt(workflowRunIDStr, 10);
         if (Number.isNaN(workflowRunID)) {
             core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
-            return;
+            return false;
         }
         let matrix = core.getInput('matrix');
         if (matrix === "null" || matrix === "") {
@@ -114,26 +168,8 @@ async function uploadFiles(sarifFiles) {
             "started_at": startedAt,
             "tool_names": toolNames,
         });
-        core.info('Uploading results');
-        const githubToken = core.getInput('token');
-        const ph = new auth.BearerCredentialHandler(githubToken);
-        const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
-        const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
-        const res = await client.put(url, payload);
-        const requestID = res.message.headers["x-github-request-id"];
-        core.debug('response status: ' + res.message.statusCode);
-        if (res.message.statusCode === 500) {
-            // If the upload fails with 500 then we assume it is a temporary problem
-            // with turbo-scan and not an error that the user has caused or can fix.
-            // We avoid marking the job as failed to avoid breaking CI workflows.
-            core.error('Upload failed (' + requestID + '): ' + await res.readBody());
-        }
-        else if (res.message.statusCode !== 202) {
-            core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
-        }
-        else {
-            core.info("Successfully uploaded results");
-        }
+        // Make the upload
+        succeeded = await uploadPayload(payload);
         // Mark that we have made an upload
         fs.writeFileSync(sentinelFile, '');
     }
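Here the inline HTTP call is replaced by a single call to the new uploadPayload, and the sentinel file written afterwards is what the check near the top of uploadFiles (the hunk at line 132) looks for, so at most one upload is made per job. A minimal sketch of that sentinel-file guard; uploadOnce and doUpload are hypothetical names used only for illustration, not code from this commit.

const fs = require('fs');

// Run `doUpload` at most once per job: the first call drops a sentinel file,
// and any later call that sees the file skips the upload and reports false.
// Note the sentinel is written after the attempt, whether or not it succeeded,
// matching the order in the diff above.
async function uploadOnce(sentinelFile, doUpload) {
    if (fs.existsSync(sentinelFile)) {
        return false;
    }
    const succeeded = await doUpload();
    fs.writeFileSync(sentinelFile, '');
    return succeeded;
}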
@@ -141,4 +177,5 @@ async function uploadFiles(sarifFiles) {
         core.setFailed(error.message);
     }
     core.endGroup();
+    return succeeded;
 }
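Because upload and uploadFiles now resolve to a boolean, a caller can react when no upload was made. An illustrative caller, not part of this commit; the path and log message are placeholders.

const uploadLib = require('./upload-lib');

async function run() {
    // 'sarif-results' is a placeholder directory for this illustration
    const uploaded = await uploadLib.upload('sarif-results');
    if (!uploaded) {
        console.log('No SARIF upload was made from this job; see the log above for details.');
    }
}

run();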