Bump artifact dependencies if CODEQL_ACTION_ARTIFACT_V2_UPGRADE enabled (#2482)
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
Co-authored-by: Henry Mercer <henrymercer@github.com>
parent cf5b0a9041
commit a196a714b8
5388 changed files with 2176737 additions and 71701 deletions
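The title says the vendored artifact upgrade is gated on the CODEQL_ACTION_ARTIFACT_V2_UPGRADE flag. As a purely hypothetical sketch of that kind of gating, only the flag name comes from the title; reading it as an environment variable and everything else below is assumed, not the actual codeql-action code:

// Hypothetical sketch -- not the codeql-action source. Assumes the flag is
// surfaced as an environment variable; only its name comes from the commit title.
function artifactV2UpgradeEnabled() {
  return process.env.CODEQL_ACTION_ARTIFACT_V2_UPGRADE === "true";
}

if (artifactV2UpgradeEnabled()) {
  // Take the code path that uses the v2 @actions/artifact client vendored here.
} else {
  // Keep using the previously vendored artifact client.
}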
node_modules/@actions/artifact/lib/internal/shared/config.js (generated, vendored, normal file, 63 additions)
@@ -0,0 +1,63 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
const os_1 = __importDefault(require("os"));
// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is use during upload to blob storage
function getUploadChunkSize() {
    return 8 * 1024 * 1024; // 8 MB Chunks
}
exports.getUploadChunkSize = getUploadChunkSize;
function getRuntimeToken() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!token) {
        throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
    }
    return token;
}
exports.getRuntimeToken = getRuntimeToken;
function getResultsServiceUrl() {
    const resultsUrl = process.env['ACTIONS_RESULTS_URL'];
    if (!resultsUrl) {
        throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
    }
    return new URL(resultsUrl).origin;
}
exports.getResultsServiceUrl = getResultsServiceUrl;
function isGhes() {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    const hostname = ghUrl.hostname.trimEnd().toUpperCase();
    const isGitHubHost = hostname === 'GITHUB.COM';
    const isGheHost = hostname.endsWith('.GHE.COM');
    const isLocalHost = hostname.endsWith('.LOCALHOST');
    return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
    const ghWorkspaceDir = process.env['GITHUB_WORKSPACE'];
    if (!ghWorkspaceDir) {
        throw new Error('Unable to get the GITHUB_WORKSPACE env variable');
    }
    return ghWorkspaceDir;
}
exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
function getConcurrency() {
    const numCPUs = os_1.default.cpus().length;
    if (numCPUs <= 4) {
        return 32;
    }
    const concurrency = 16 * numCPUs;
    return concurrency > 300 ? 300 : concurrency;
}
exports.getConcurrency = getConcurrency;
function getUploadChunkTimeout() {
    return 30000; // 30 seconds
}
exports.getUploadChunkTimeout = getUploadChunkTimeout;
//# sourceMappingURL=config.js.map
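For illustration, a minimal sketch of calling the helpers defined above. The require path mirrors the vendored file location, but this is an internal module of @actions/artifact rather than its public API, so consuming it directly is an assumption made only for this example:

// Sketch only: exercises the internal config helpers shown in the diff above.
const config = require("@actions/artifact/lib/internal/shared/config");

console.log(config.getUploadChunkSize());    // 8388608 (8 MB chunks)
console.log(config.getConcurrency());        // 32 on <= 4 CPUs, otherwise min(16 * CPUs, 300)
console.log(config.isGhes());                // false for github.com, *.ghe.com and *.localhost hosts
console.log(config.getUploadChunkTimeout()); // 30000 ms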