Merge branch 'main' into split-upload-method
commit 4e46a490ae
13 changed files with 245 additions and 36 deletions
@@ -336,6 +336,8 @@ test("validateWorkflow() when on.pull_request for mismatched wildcard branches",
 });
 
 test("validateWorkflow() when HEAD^2 is checked out", (t) => {
+  process.env.GITHUB_JOB = "test";
+
   const errors = actionsutil.validateWorkflow({
     on: ["push", "pull_request"],
     jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
@@ -432,3 +434,61 @@ on:
 
   t.deepEqual(errors, []);
 });
+
+test("validateWorkflow() should only report the current job's CheckoutWrongHead", (t) => {
+  process.env.GITHUB_JOB = "test";
+
+  const errors = actionsutil.validateWorkflow(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [master]
+jobs:
+  test:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test2:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test3:
+    steps: []
+`)
+  );
+
+  t.deepEqual(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]);
+});
+
+test("validateWorkflow() should not report a different job's CheckoutWrongHead", (t) => {
+  process.env.GITHUB_JOB = "test3";
+
+  const errors = actionsutil.validateWorkflow(
+    yaml.safeLoad(`
+name: "CodeQL"
+on:
+  push:
+    branches: [master]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [master]
+jobs:
+  test:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test2:
+    steps:
+      - run: "git checkout HEAD^2"
+
+  test3:
+    steps: []
+`)
+  );
+
+  t.deepEqual(errors, []);
+});
@@ -211,10 +211,15 @@ export const WorkflowErrors = toCodedErrors({
 export function validateWorkflow(doc: Workflow): CodedError[] {
   const errors: CodedError[] = [];
 
   // .jobs[key].steps[].run
-  for (const job of Object.values(doc?.jobs || {})) {
-    if (Array.isArray(job?.steps)) {
-      for (const step of job?.steps) {
+  const jobName = process.env.GITHUB_JOB;
+
+  if (jobName) {
+    const job = doc?.jobs?.[jobName];
+    const steps = job?.steps;
+
+    if (Array.isArray(steps)) {
+      for (const step of steps) {
         // this was advice that we used to give in the README
         // we actually want to run the analysis on the merge commit
         // to produce results that are more inline with expectations
@@ -222,6 +227,7 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
         // and avoid some race conditions
         if (step?.run === "git checkout HEAD^2") {
           errors.push(WorkflowErrors.CheckoutWrongHead);
+          break;
         }
       }
     }
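A minimal sketch of the behavior this change introduces (the job ids and step commands below are illustrative, not taken from the diff): validateWorkflow() now only inspects the job named by GITHUB_JOB, which GitHub Actions sets to the id of the currently running job, so a "git checkout HEAD^2" step in some other job no longer triggers the error.

// Illustrative only: "analyze" and "other" are hypothetical job ids.
process.env.GITHUB_JOB = "analyze";

const errors = actionsutil.validateWorkflow({
  on: ["push", "pull_request"],
  jobs: {
    // current job: no "git checkout HEAD^2" step, so nothing is reported
    analyze: { steps: [{ run: "npm test" }] },
    // a different job checks out HEAD^2, but it is not inspected
    other: { steps: [{ run: "git checkout HEAD^2" }] },
  },
});
// errors is [] — CheckoutWrongHead is scoped to the current job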
@@ -1,9 +1,12 @@
+import * as fs from "fs";
+import * as path from "path";
+
 import test from "ava";
 
 import { getRunnerLogger } from "./logging";
 import { setupTests } from "./testing-utils";
 import * as uploadLib from "./upload-lib";
-import { GitHubVersion } from "./util";
+import { GitHubVersion, withTmpDir } from "./util";
 
 setupTests(test);
 
@@ -93,3 +96,38 @@ test("validate correct payload used per version", async (t) => {
     t.falsy(payload.base_sha);
   }
 });
+
+test("finding SARIF files", async (t) => {
+  await withTmpDir(async (tmpDir) => {
+    // include a couple of sarif files
+    fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
+    fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
+
+    // other random files shouldn't be returned
+    fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
+
+    // we should recursively look in subdirectories
+    fs.mkdirSync(path.join(tmpDir, "dir1"));
+    fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
+    fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
+    fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
+
+    // we should ignore symlinks
+    fs.mkdirSync(path.join(tmpDir, "dir3"));
+    fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
+    fs.symlinkSync(
+      path.join(tmpDir, "a.sarif"),
+      path.join(tmpDir, "dir3", "symlink2.sarif"),
+      "file"
+    );
+
+    const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
+
+    t.deepEqual(sarifFiles, [
+      path.join(tmpDir, "a.sarif"),
+      path.join(tmpDir, "b.sarif"),
+      path.join(tmpDir, "dir1", "d.sarif"),
+      path.join(tmpDir, "dir1", "dir2", "e.sarif"),
+    ]);
+  });
+});
@@ -81,6 +81,24 @@ export interface UploadStatusReport {
   num_results_in_sarif?: number;
 }
 
+// Recursively walks a directory and returns all SARIF files it finds.
+// Does not follow symlinks.
+export function findSarifFilesInDir(sarifPath: string): string[] {
+  const sarifFiles: string[] = [];
+  const walkSarifFiles = (dir: string) => {
+    const entries = fs.readdirSync(dir, { withFileTypes: true });
+    for (const entry of entries) {
+      if (entry.isFile() && entry.name.endsWith(".sarif")) {
+        sarifFiles.push(path.resolve(dir, entry.name));
+      } else if (entry.isDirectory()) {
+        walkSarifFiles(path.resolve(dir, entry.name));
+      }
+    }
+  };
+  walkSarifFiles(sarifPath);
+  return sarifFiles;
+}
+
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
 // Returns true iff the upload occurred and succeeded
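Why this skips symlinks: with { withFileTypes: true }, readdirSync returns Dirent entries whose type reflects the directory entry itself (lstat-style), so a symlink answers false to both isFile() and isDirectory() and is neither collected nor recursed into. A quick usage sketch (the path is illustrative):

// Hypothetical directory; returns absolute paths to every .sarif file,
// however deeply nested, while ignoring symlinked files and directories.
const sarifFiles = findSarifFilesInDir("/tmp/codeql-results");
// e.g. ["/tmp/codeql-results/a.sarif", "/tmp/codeql-results/dir1/d.sarif"]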
@@ -146,23 +164,18 @@ export async function uploadFromRunner(
 }
 
 function getSarifFilePaths(sarifPath: string) {
-  const sarifFiles: string[] = [];
   if (!fs.existsSync(sarifPath)) {
     throw new Error(`Path does not exist: ${sarifPath}`);
   }
+
+  let sarifFiles: string[];
   if (fs.lstatSync(sarifPath).isDirectory()) {
-    const paths = fs
-      .readdirSync(sarifPath)
-      .filter((f) => f.endsWith(".sarif"))
-      .map((f) => path.resolve(sarifPath, f));
-    for (const filepath of paths) {
-      sarifFiles.push(filepath);
-    }
+    sarifFiles = findSarifFilesInDir(sarifPath);
     if (sarifFiles.length === 0) {
       throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
     }
   } else {
-    sarifFiles.push(sarifPath);
+    sarifFiles = [sarifPath];
   }
   return sarifFiles;
 }
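For readability, here is getSarifFilePaths as it reads once the hunk above is applied (assembled from the context and added lines; no new logic). The directory branch now recurses via findSarifFilesInDir instead of matching .sarif files only at the top level, while keeping the empty-result error:

function getSarifFilePaths(sarifPath: string) {
  if (!fs.existsSync(sarifPath)) {
    throw new Error(`Path does not exist: ${sarifPath}`);
  }

  let sarifFiles: string[];
  if (fs.lstatSync(sarifPath).isDirectory()) {
    // recursive search, replacing the old single-level readdir/filter
    sarifFiles = findSarifFilesInDir(sarifPath);
    if (sarifFiles.length === 0) {
      throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
    }
  } else {
    sarifFiles = [sarifPath];
  }
  return sarifFiles;
}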