Merge pull request #818 from github/update-v1.0.23-2ecc17d7
Merge main into v1
commit a627e9fa50
28 changed files with 184 additions and 65 deletions
.github/workflows/__debug-artifacts.yml (2 changes, generated, vendored)

@@ -52,7 +52,7 @@ jobs:
         id: analysis
       - uses: actions/download-artifact@v2
         with:
-          name: debug-artifacts
+          name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
       - shell: bash
         run: |
           LANGUAGES="cpp csharp go java javascript python"
@@ -1,5 +1,10 @@
 # CodeQL Action and CodeQL Runner Changelog
 
+## 1.0.23 - 16 Nov 2021
+
+- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
+- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
+
 ## 1.0.22 - 04 Nov 2021
 
 - The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)
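The 1.0.23 entry above covers the new per-category behaviour of `upload-sarif`: a single job can now upload several SARIF files as long as each step sets a different `category`. A minimal workflow sketch of that usage, with made-up SARIF file names and category values:

```yaml
steps:
  # Two uploads in the same job are accepted because each step
  # declares its own category; the file names are hypothetical.
  - uses: github/codeql-action/upload-sarif@v1
    with:
      sarif_file: eslint-results.sarif
      category: eslint
  - uses: github/codeql-action/upload-sarif@v1
    with:
      sarif_file: other-tool-results.sarif
      category: other-tool
```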
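Likewise, the 1.0.22 entry refers to the `ram` and `threads` inputs of the `init` step. A sketch of how they might be set (the values are illustrative; `analyze` reuses them as defaults unless it is given its own):

```yaml
steps:
  - uses: github/codeql-action/init@v1
    with:
      languages: javascript
      ram: 4096     # MB available to the CodeQL extractors (illustrative value)
      threads: 2    # extractor threads; also the default for the analyze step
  # ... build steps for compiled languages would go here ...
  - uses: github/codeql-action/analyze@v1
```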
lib/actions-util.js (7 changes, generated)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));

@@ -98,6 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
     }
     catch (e) {
         core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
+        core.info(e.stack || "NO STACK");
         return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
     }
 };

@@ -574,4 +575,8 @@ async function isAnalyzingDefaultBranch() {
     return currentRef === defaultBranch;
 }
 exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
+function sanitizeArifactName(name) {
+    return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
+}
+exports.sanitizeArifactName = sanitizeArifactName;
 //# sourceMappingURL=actions-util.js.map
File diff suppressed because one or more lines are too long
lib/actions-util.test.js (6 changes, generated)

@@ -440,4 +440,10 @@ on: ["push"]
         t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
     });
 });
+(0, ava_1.default)("sanitizeArifactName", (t) => {
+    t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
+    t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
+    t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
+    t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
+});
 //# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
lib/analyze-action.js (8 changes, generated)

@@ -167,7 +167,13 @@ async function run() {
     }
 }
 async function uploadDebugArtifacts(toUpload, rootDir) {
-    await artifact.create().uploadArtifact(util_1.DEBUG_ARTIFACT_NAME, toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
+    let suffix = "";
+    const matrix = actionsUtil.getRequiredInput("matrix");
+    if (matrix !== undefined && matrix !== "null") {
+        for (const entry of Object.entries(JSON.parse(matrix)).sort())
+            suffix += `-${entry[1]}`;
+    }
+    await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${util_1.DEBUG_ARTIFACT_NAME}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
 }
 function listFolder(dir) {
     const entries = fs.readdirSync(dir, { withFileTypes: true });
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
 {
-  "bundleVersion": "codeql-bundle-20211025"
+  "bundleVersion": "codeql-bundle-20211115"
 }
lib/tracer-config.js (18 changes, generated)

@@ -182,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
             tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
         }
         mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
-    }
-    // Add a couple more variables
-    mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
-    const codeQLDir = path.dirname(codeql.getPath());
-    if (process.platform === "darwin") {
-        mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
-    }
-    else if (process.platform !== "win32") {
-        mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
+        // Add a couple more variables
+        mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
+        const codeQLDir = path.dirname(codeql.getPath());
+        if (process.platform === "darwin") {
+            mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
+        }
+        else if (process.platform !== "win32") {
+            mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
+        }
     }
     // On macos it's necessary to prefix the build command with the runner executable
     // on order to trace when System Integrity Protection is enabled.
File diff suppressed because one or more lines are too long
lib/upload-lib.js (36 changes, generated)

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
+exports.validateUniqueCategory = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const zlib_1 = __importDefault(require("zlib"));

@@ -243,14 +243,7 @@ exports.buildPayload = buildPayload;
 async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
     logger.startGroup("Uploading results");
     logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
-    if (util.isActions()) {
-        // This check only works on actions as env vars don't persist between calls to the runner
-        const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-        if (process.env[sentinelEnvVar]) {
-            throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
-        }
-        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
-    }
+    validateUniqueCategory(category);
     // Validate that the files we were asked to upload are all valid SARIF files
     for (const file of sarifFiles) {
         validateSarifFileSchema(file, logger);

@@ -278,4 +271,29 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
         num_results_in_sarif: numResultInSarif,
     };
 }
+function validateUniqueCategory(category) {
+    if (util.isActions()) {
+        // This check only works on actions as env vars don't persist between calls to the runner
+        const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
+        if (process.env[sentinelEnvVar]) {
+            throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
+                "Please specify a unique `category` to call this action multiple times. " +
+                `Category: ${category ? category : "(none)"}`);
+        }
+        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+    }
+}
+exports.validateUniqueCategory = validateUniqueCategory;
+/**
+ * Santizes a string to be used as an environment variable name.
+ * This will replace all non-alphanumeric characters with underscores.
+ * There could still be some false category clashes if two uploads
+ * occur that differ only in their non-alphanumeric characters. This is
+ * unlikely.
+ *
+ * @param str the initial value to sanitize
+ */
+function sanitize(str) {
+    return str.replace(/[^a-zA-Z0-9_]/g, "_");
+}
 //# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
lib/upload-lib.test.js (16 changes, generated)

@@ -113,4 +113,20 @@ ava_1.default.beforeEach(() => {
     modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
     t.deepEqual(modifiedSarif, expectedSarif);
 });
+(0, ava_1.default)("validateUniqueCategory", (t) => {
+    t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
+    t.throws(() => uploadLib.validateUniqueCategory(undefined));
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc"));
+    t.notThrows(() => uploadLib.validateUniqueCategory("def"));
+    t.throws(() => uploadLib.validateUniqueCategory("def"));
+    // Our category sanitization is not perfect. Here are some examples
+    // of where we see false clashes
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc def"));
+    // this one is fine
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
+});
 //# sourceMappingURL=upload-lib.test.js.map
File diff suppressed because one or more lines are too long
node_modules/.package-lock.json (2 changes, generated, vendored)

@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
package-lock.json (4 changes, generated)

@@ -1,12 +1,12 @@
 {
   "name": "codeql",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "codeql",
-      "version": "1.0.22",
+      "version": "1.0.23",
       "license": "MIT",
       "dependencies": {
         "@actions/artifact": "^0.5.2",
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "private": true,
   "description": "CodeQL action",
   "scripts": {
@@ -13,7 +13,7 @@ steps:
     id: analysis
   - uses: actions/download-artifact@v2
     with:
-      name: debug-artifacts
+      name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
   - shell: bash
     run: |
      LANGUAGES="cpp csharp go java javascript python"
runner/package-lock.json (2 changes, generated)

@@ -1,6 +1,6 @@
 {
   "name": "codeql-runner",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -1,6 +1,6 @@
 {
   "name": "codeql-runner",
-  "version": "1.0.22",
+  "version": "1.0.23",
   "private": true,
   "description": "CodeQL runner",
   "scripts": {
@@ -680,3 +680,13 @@ test("isAnalyzingDefaultBranch()", async (t) => {
     t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
   });
 });
+
+test("sanitizeArifactName", (t) => {
+  t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
+  t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
+  t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
+  t.deepEqual(
+    actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"),
+    "manyinvalid"
+  );
+});
@@ -85,6 +85,7 @@ export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
     core.info(
       `Failed to call git to get current commit. Continuing with data from environment: ${e}`
     );
+    core.info((e as Error).stack || "NO STACK");
     return getRequiredEnvParam("GITHUB_SHA");
   }
 };

@@ -709,3 +710,7 @@ export async function isAnalyzingDefaultBranch(): Promise<boolean> {
 
   return currentRef === defaultBranch;
 }
+
+export function sanitizeArifactName(name: string): string {
+  return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
+}
@@ -237,8 +237,14 @@ async function run() {
 }
 
 async function uploadDebugArtifacts(toUpload: string[], rootDir: string) {
+  let suffix = "";
+  const matrix = actionsUtil.getRequiredInput("matrix");
+  if (matrix !== undefined && matrix !== "null") {
+    for (const entry of Object.entries(JSON.parse(matrix)).sort())
+      suffix += `-${entry[1]}`;
+  }
   await artifact.create().uploadArtifact(
-    DEBUG_ARTIFACT_NAME,
+    actionsUtil.sanitizeArifactName(`${DEBUG_ARTIFACT_NAME}${suffix}`),
     toUpload.map((file) => path.normalize(file)),
     path.normalize(rootDir)
   );
@@ -1,3 +1,3 @@
 {
-  "bundleVersion": "codeql-bundle-20211025"
+  "bundleVersion": "codeql-bundle-20211115"
 }
@@ -212,25 +212,25 @@ export async function getCombinedTracerConfig(
       );
     }
     mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
-  }
-
-  // Add a couple more variables
-  mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
-  const codeQLDir = path.dirname(codeql.getPath());
-  if (process.platform === "darwin") {
-    mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
-      codeQLDir,
-      "tools",
-      "osx64",
-      "libtrace.dylib"
-    );
-  } else if (process.platform !== "win32") {
-    mainTracerConfig.env["LD_PRELOAD"] = path.join(
-      codeQLDir,
-      "tools",
-      "linux64",
-      "${LIB}trace.so"
-    );
+
+    // Add a couple more variables
+    mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
+    const codeQLDir = path.dirname(codeql.getPath());
+    if (process.platform === "darwin") {
+      mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
+        codeQLDir,
+        "tools",
+        "osx64",
+        "libtrace.dylib"
+      );
+    } else if (process.platform !== "win32") {
+      mainTracerConfig.env["LD_PRELOAD"] = path.join(
+        codeQLDir,
+        "tools",
+        "linux64",
+        "${LIB}trace.so"
+      );
+    }
   }
 
   // On macos it's necessary to prefix the build command with the runner executable
@@ -175,3 +175,24 @@ test("populateRunAutomationDetails", (t) => {
   );
   t.deepEqual(modifiedSarif, expectedSarif);
 });
+
+test("validateUniqueCategory", (t) => {
+  t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
+  t.throws(() => uploadLib.validateUniqueCategory(undefined));
+
+  t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
+  t.throws(() => uploadLib.validateUniqueCategory("abc"));
+
+  t.notThrows(() => uploadLib.validateUniqueCategory("def"));
+  t.throws(() => uploadLib.validateUniqueCategory("def"));
+
+  // Our category sanitization is not perfect. Here are some examples
+  // of where we see false clashes
+  t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
+  t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
+  t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
+  t.throws(() => uploadLib.validateUniqueCategory("abc def"));
+
+  // this one is fine
+  t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
+});
@@ -343,16 +343,7 @@ async function uploadFiles(
   logger.startGroup("Uploading results");
   logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
 
-  if (util.isActions()) {
-    // This check only works on actions as env vars don't persist between calls to the runner
-    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-    if (process.env[sentinelEnvVar]) {
-      throw new Error(
-        "Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job"
-      );
-    }
-    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
-  }
+  validateUniqueCategory(category);
 
   // Validate that the files we were asked to upload are all valid SARIF files
   for (const file of sarifFiles) {

@@ -409,3 +400,33 @@ async function uploadFiles(
     num_results_in_sarif: numResultInSarif,
   };
 }
+
+export function validateUniqueCategory(category: string | undefined) {
+  if (util.isActions()) {
+    // This check only works on actions as env vars don't persist between calls to the runner
+    const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${
+      category ? `_${sanitize(category)}` : ""
+    }`;
+    if (process.env[sentinelEnvVar]) {
+      throw new Error(
+        "Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
+          "Please specify a unique `category` to call this action multiple times. " +
+          `Category: ${category ? category : "(none)"}`
+      );
+    }
+    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+  }
+}
+
+/**
+ * Santizes a string to be used as an environment variable name.
+ * This will replace all non-alphanumeric characters with underscores.
+ * There could still be some false category clashes if two uploads
+ * occur that differ only in their non-alphanumeric characters. This is
+ * unlikely.
+ *
+ * @param str the initial value to sanitize
+ */
+function sanitize(str: string) {
+  return str.replace(/[^a-zA-Z0-9_]/g, "_");
+}