Remove redundant layer from upload files functions
parent 79e9a50e51
commit 6c2a71ced3
15 changed files with 54 additions and 80 deletions
27 lib/upload-lib.js generated
@@ -30,9 +30,9 @@ exports.InvalidSarifUploadError = void 0;
 exports.shouldShowCombineSarifFilesDeprecationWarning = shouldShowCombineSarifFilesDeprecationWarning;
 exports.populateRunAutomationDetails = populateRunAutomationDetails;
 exports.findSarifFilesInDir = findSarifFilesInDir;
-exports.uploadFromActions = uploadFromActions;
 exports.validateSarifFileSchema = validateSarifFileSchema;
 exports.buildPayload = buildPayload;
 exports.uploadFiles = uploadFiles;
 exports.waitForProcessing = waitForProcessing;
 exports.validateUniqueCategory = validateUniqueCategory;
 const fs = __importStar(require("fs"));
@@ -278,13 +278,6 @@ function findSarifFilesInDir(sarifPath) {
     walkSarifFiles(sarifPath);
     return sarifFiles;
 }
-/**
- * Uploads a single SARIF file or a directory of SARIF files depending on what `sarifPath` refers
- * to.
- */
-async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
-    return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await api.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
-}
 function getSarifFilePaths(sarifPath) {
     if (!fs.existsSync(sarifPath)) {
         // This is always a configuration error, even for first-party runs.
@@ -389,9 +382,13 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
     }
     return payloadObj;
 }
-// Uploads the given set of sarif files.
-// Returns true iff the upload occurred and succeeded
-async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, workflowRunAttempt, sourceRoot, environment, logger) {
+/**
+ * Uploads a single SARIF file or a directory of SARIF files depending on what `sarifPath` refers
+ * to.
+ */
+async function uploadFiles(sarifPath, checkoutPath, category, logger) {
+    const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
+    const sarifFiles = getSarifFilePaths(sarifPath);
     logger.startGroup("Uploading results");
     logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
     const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
@@ -401,7 +398,9 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
         validateSarifFileSchema(file, logger);
     }
     let sarif = await combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger);
-    sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
+    sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await api.getAnalysisKey();
+    const environment = actionsUtil.getRequiredInput("matrix");
     sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
     const toolNames = util.getToolNames(sarif);
     logger.debug(`Validating that each SARIF run has a unique category`);
@@ -410,8 +409,8 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
     const sarifPayload = JSON.stringify(sarif);
     logger.debug(`Compressing serialized SARIF`);
     const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
-    const checkoutURI = (0, file_url_1.default)(sourceRoot);
-    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
+    const checkoutURI = (0, file_url_1.default)(checkoutPath);
+    const payload = buildPayload(await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), analysisKey, category, zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, environment, toolNames, await actionsUtil.determineMergeBaseCommitOid());
     // Log some useful debug info about the info
     const rawUploadSizeBytes = sarifPayload.length;
     logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
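For context, a minimal sketch of how a call site changes once the redundant layer is removed. The caller below is hypothetical and not part of this commit: its name, require path, and argument names are assumptions for illustration, since the real entry points are the action wrappers, which are outside this file's diff.

// Hypothetical caller, for illustration only; not part of this commit.
// The require path and variable names are assumptions.
const upload_lib = require("./upload-lib");

async function runUpload(sarifPath, checkoutPath, category, logger) {
    // Before this commit the extra layer was:
    //   await upload_lib.uploadFromActions(sarifPath, checkoutPath, category, logger);
    // which gathered the repository, commit OID, ref, analysis key, workflow name,
    // run ID/attempt and matrix, then forwarded all twelve values to uploadFiles().
    // After this commit, uploadFiles() takes the same four arguments and gathers
    // those values itself:
    return await upload_lib.uploadFiles(sarifPath, checkoutPath, category, logger);
}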