Add telemetry for TRAP caching

parent ca10a6d552
commit 4139682b64

63 changed files with 1195 additions and 126 deletions
lib/analysis-paths.test.js (generated, 3 changes)
@@ -50,6 +50,7 @@ const util = __importStar(require("./util"));
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -79,6 +80,7 @@ const util = __importStar(require("./util"));
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -108,6 +110,7 @@ const util = __importStar(require("./util"));
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;SACf,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;SACf,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,sBAAsB,EAAE;YACtB,iBAAiB,EAAE,KAAK;YACxB,kBAAkB,EAAE,KAAK;YACzB,oBAAoB,EAAE,KAAK;SAC5B;QACD,UAAU,EAAE,EAAE;KACf,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,sBAAsB,EAAE;gBACtB,iBAAiB,EAAE,KAAK;gBACxB,kBAAkB,EAAE,KAAK;gBACzB,oBAAoB,EAAE,KAAK;aAC5B;YACD,UAAU,EAAE,EAAE;YACd,qBAAqB,EAAE,CAAC;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;IAC/D,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;QACrB,OAAO;QACP,SAAS,EAAE,EAAE;QACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;QACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;QACrD,KAAK,EAAE,EAAE;QACT,SAAS,EAAE,KAAK;QAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;QACnD,sBAAsB,EAAE;YACtB,iBAAiB,EAAE,KAAK;YACxB,kBAAkB,EAAE,KAAK;YACzB,oBAAoB,EAAE,KAAK;SAC5B;QACD,UAAU,EAAE,EAAE;QACd,qBAAqB,EAAE,CAAC;KACzB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC"}
lib/analyze-action.js (generated, 22 changes)
@@ -35,7 +35,7 @@ const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, config, stats, error) {
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, didUploadTrapCaches) {
const status = actionsUtil.getActionsStatus(error, stats === null || stats === void 0 ? void 0 : stats.analyze_failure_language);
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error === null || error === void 0 ? void 0 : error.message, error === null || error === void 0 ? void 0 : error.stack);
const statusReport = {
@@ -46,6 +46,10 @@ async function sendStatusReport(startedAt, config, stats, error) {
}
: {}),
...(stats || {}),
trap_cache_upload_duration_ms: trapCacheUploadTime || 0,
trap_cache_upload_size_bytes: config && didUploadTrapCaches
? await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches)
: 0,
};
await actionsUtil.sendStatusReport(statusReport);
}
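The two new fields land on the `finish` status report sent by the analyze action. A rough sketch of the telemetry portion of that payload — field names come from the diff above, the numbers are invented for illustration:

```js
// Illustrative only: TRAP-cache upload telemetry on the "finish" status report.
const statusReport = {
  // ...base report and analysis stats spread in via ...(stats || {})...
  trap_cache_upload_duration_ms: 1234,  // falls back to 0 when no upload time was recorded
  trap_cache_upload_size_bytes: 567890, // 0 unless the caches were actually uploaded on this run
};
```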
@@ -55,6 +59,8 @@ async function run() {
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
let trapCacheUploadTime = undefined;
let didUploadTrapCaches = false;
util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version);
try {
@@ -100,8 +106,10 @@ async function run() {
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
// Possibly upload the TRAP caches for later re-use
const trapCacheUploadStartTime = Date.now();
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
trapCacheUploadTime = Date.now() - trapCacheUploadStartTime;
didUploadTrapCaches = await (0, trap_caching_1.uploadTrapCaches)(codeql, config, logger);
// We don't upload results in test mode, so don't wait for processing
if (util.isInTestMode()) {
core.debug("In test mode. Waiting for processing is disabled.");
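The upload step in `run()` is now timed, and the boolean result of `uploadTrapCaches` is kept so the finish report knows whether a size is worth computing. A minimal sketch of the pattern as a standalone snippet (the real code lives inside `run()` and uses the imports shown earlier):

```js
// Sketch: measure the TRAP cache upload and remember whether it happened.
const trapCacheUploadStartTime = Date.now();
const didUploadTrapCaches = await uploadTrapCaches(codeql, config, logger); // now resolves to a boolean
const trapCacheUploadTime = Date.now() - trapCacheUploadStartTime;
// Both values are later threaded into every sendStatusReport(...) call.
```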
@@ -117,10 +125,10 @@ async function run() {
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error);
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, didUploadTrapCaches);
}
else {
await sendStatusReport(startedAt, config, undefined, error);
await sendStatusReport(startedAt, config, undefined, error, trapCacheUploadTime, didUploadTrapCaches);
}
return;
}
@@ -128,13 +136,13 @@ async function run() {
await sendStatusReport(startedAt, config, {
...runStats,
...uploadResult.statusReport,
});
}, undefined, trapCacheUploadTime, didUploadTrapCaches);
}
else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats });
await sendStatusReport(startedAt, config, { ...runStats }, undefined, trapCacheUploadTime, didUploadTrapCaches);
}
else {
await sendStatusReport(startedAt, config, undefined);
await sendStatusReport(startedAt, config, undefined, undefined, trapCacheUploadTime, didUploadTrapCaches);
}
}
exports.runPromise = run();
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated, 2 changes)
@@ -124,6 +124,7 @@ const util = __importStar(require("./util"));
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
@@ -370,6 +371,7 @@ const stubConfig = {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
for (const options of [
{
File diff suppressed because one or more lines are too long
lib/codeql.test.js (generated, 1 change)
@@ -76,6 +76,7 @@ ava_1.default.beforeEach(() => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
});
(0, ava_1.default)("download codeql bundle cache", async (t) => {
File diff suppressed because one or more lines are too long
lib/config-utils.js (generated, 22 changes)
@@ -492,6 +492,7 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
augmentationProperties.injectedMlQueries =
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
return {
languages,
queries,
@@ -507,12 +508,21 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches: trapCachingEnabled
? await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger)
: {},
trapCaches,
trapCacheDownloadTime,
};
}
exports.getDefaultConfig = getDefaultConfig;
async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger) {
let trapCaches = {};
let trapCacheDownloadTime = 0;
if (trapCachingEnabled) {
const start = Date.now();
trapCaches = await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger);
trapCacheDownloadTime = Date.now() - start;
}
return { trapCaches, trapCacheDownloadTime };
}
/**
* Load the config from the given file.
*/
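Both `getDefaultConfig` and `loadConfig` now obtain the caches through the same timing helper instead of calling `downloadTrapCaches` inline. A hedged sketch of the caller side (standalone snippet; in the real code the two values are spread straight into the returned config object, and the log line is purely illustrative):

```js
// Sketch: download TRAP caches and record how long it took, only when enabled.
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(
  trapCachingEnabled,
  codeQL,
  languages,
  logger
);
logger.debug(`TRAP cache download took ${trapCacheDownloadTime} ms`);
```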
@@ -603,6 +613,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
return {
languages,
queries,
@@ -618,9 +629,8 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches: trapCachingEnabled
? await (0, trap_caching_1.downloadTrapCaches)(codeQL, languages, logger)
: {},
trapCaches,
trapCacheDownloadTime,
};
}
/**
File diff suppressed because one or more lines are too long
lib/config-utils.test.js (generated, 1 change)
@@ -224,6 +224,7 @@ function mockListLanguages(languages) {
debugDatabaseName: "my-db",
augmentationProperties: configUtils.defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
File diff suppressed because one or more lines are too long
lib/database-upload.test.js (generated, 1 change)
@@ -61,6 +61,7 @@ function getTestConfig(tmpDir) {
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: config_utils_1.defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};
}
async function mockHttpRequests(databaseUploadStatusCode) {
File diff suppressed because one or more lines are too long
lib/init-action.js (generated, 4 changes)
@@ -29,6 +29,7 @@ const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const trap_caching_1 = require("./trap-caching");
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
@@ -64,6 +65,9 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
tools_resolved_version: toolsVersion,
workflow_languages: workflowLanguages || "",
trap_cache_languages: Object.keys(config.trapCaches).join(","),
trap_cache_download_size_bytes: await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches),
trap_cache_download_duration_ms: config.trapCacheDownloadTime,
};
await (0, actions_util_1.sendStatusReport)(statusReport);
}
File diff suppressed because one or more lines are too long
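The init action's success report mirrors the upload telemetry with three download-side fields. A rough sketch of just that portion of the payload (values invented for illustration):

```js
// Illustrative only: TRAP-cache download telemetry on the init status report.
const statusReport = {
  // ...existing init report fields...
  trap_cache_languages: "javascript,ruby",  // Object.keys(config.trapCaches).join(",")
  trap_cache_download_size_bytes: 1048576,  // computed via getTotalCacheSize(config.trapCaches)
  trap_cache_download_duration_ms: 4200,    // copied from config.trapCacheDownloadTime
};
```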
lib/tracer-config.test.js (generated, 1 change)
@@ -49,6 +49,7 @@ function getTestConfig(tmpDir) {
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: configUtils.defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};
}
// A very minimal setup
File diff suppressed because one or more lines are too long
lib/trap-caching.js (generated, 30 changes)
@@ -18,11 +18,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getLanguagesSupportingCaching = exports.uploadTrapCaches = exports.downloadTrapCaches = exports.getTrapCachingExtractorConfigArgsForLang = exports.getTrapCachingExtractorConfigArgs = void 0;
exports.getTotalCacheSize = exports.getLanguagesSupportingCaching = exports.uploadTrapCaches = exports.downloadTrapCaches = exports.getTrapCachingExtractorConfigArgsForLang = exports.getTrapCachingExtractorConfigArgs = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const cache = __importStar(require("@actions/cache"));
const get_folder_size_1 = __importDefault(require("get-folder-size"));
const actionsUtil = __importStar(require("./actions-util"));
const codeql_1 = require("./codeql");
const util_1 = require("./util");
@@ -106,9 +110,16 @@ async function downloadTrapCaches(codeql, languages, logger) {
return result;
}
exports.downloadTrapCaches = downloadTrapCaches;
/**
* Possibly upload TRAP caches to the Actions cache.
* @param codeql The CodeQL instance to use.
* @param config The configuration for this workflow.
* @param logger A logger to record some informational messages to.
* @returns Whether the TRAP caches were uploaded.
*/
async function uploadTrapCaches(codeql, config, logger) {
if (!(await actionsUtil.isAnalyzingDefaultBranch()))
return; // Only upload caches from the default branch
return false; // Only upload caches from the default branch
const toAwait = [];
for (const language of config.languages) {
const cacheDir = config.trapCaches[language];
@@ -119,6 +130,7 @@ async function uploadTrapCaches(codeql, config, logger) {
toAwait.push(cache.saveCache([cacheDir], key));
}
await Promise.all(toAwait);
return true;
}
exports.uploadTrapCaches = uploadTrapCaches;
async function getLanguagesSupportingCaching(codeql, languages, logger) {
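`uploadTrapCaches` now resolves to a boolean rather than `undefined`, which is what lets callers decide whether an upload size is worth reporting. A hedged sketch of the caller-side contract:

```js
// Sketch: only compute the cache size if an upload actually happened.
const didUploadTrapCaches = await uploadTrapCaches(codeql, config, logger); // false off the default branch
const uploadSizeBytes = didUploadTrapCaches
  ? await getTotalCacheSize(config.trapCaches)
  : 0;
```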
@@ -147,6 +159,20 @@ async function getLanguagesSupportingCaching(codeql, languages, logger) {
return result;
}
exports.getLanguagesSupportingCaching = getLanguagesSupportingCaching;
async function getTotalCacheSize(trapCaches) {
const sizes = await Promise.all(Object.values(trapCaches).map(async (cacheDir) => {
return new Promise((resolve) => {
(0, get_folder_size_1.default)(cacheDir, (err, size) => {
// Ignore file system errors when getting the size. It's only used for telemetry anyway.
if (err)
resolve(0);
resolve(size);
});
});
}));
return sizes.reduce((a, b) => a + b, 0);
}
exports.getTotalCacheSize = getTotalCacheSize;
async function cacheKey(codeql, language, baseSha) {
return `${await cachePrefix(codeql, language)}${baseSha}`;
}
File diff suppressed because one or more lines are too long
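The new `getTotalCacheSize` helper wraps the callback-style `get-folder-size` dependency (vendored below) in a promise per cache directory and sums the results, ignoring file-system errors since the number is only used for telemetry. A short usage sketch, assuming a config with two cached languages (the paths are placeholders):

```js
// Sketch: total on-disk size of all TRAP caches, in bytes.
const trapCaches = {
  javascript: "/tmp/trap-cache/js",
  ruby: "/tmp/trap-cache/ruby",
};
const totalBytes = await getTotalCacheSize(trapCaches);
console.log(`TRAP caches occupy ${totalBytes} bytes`);
```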
lib/trap-caching.test.js (generated, 2 changes)
@@ -98,6 +98,7 @@ const testConfigWithoutTmpDir = {
trapCaches: {
javascript: "/some/cache/dir",
},
trapCacheDownloadTime: 0,
};
function getTestConfigWithTempDir(tmpDir) {
return {
@@ -123,6 +124,7 @@ function getTestConfigWithTempDir(tmpDir) {
javascript: path.resolve(tmpDir, "jsCache"),
ruby: path.resolve(tmpDir, "rubyCache"),
},
trapCacheDownloadTime: 0,
};
}
(0, ava_1.default)("check flags for JS, analyzing default branch", async (t) => {
File diff suppressed because one or more lines are too long
lib/util.test.js (generated, 1 change)
@@ -267,6 +267,7 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
});
File diff suppressed because one or more lines are too long
node_modules/.bin/get-folder-size (generated, vendored, new symbolic link)
@@ -0,0 +1 @@
../get-folder-size/bin/get-folder-size
node_modules/.package-lock.json (generated, vendored, 33 changes)
@ -726,6 +726,12 @@
|
|||
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/get-folder-size": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/get-folder-size/-/get-folder-size-2.0.0.tgz",
|
||||
"integrity": "sha512-6VKKrDB20E/6ovi2Pfpy9Pcz8Me1ue/tReaZrwrz9mfVdsr6WAMiDZ+F1oAAcss4U5n2k673i1leDIx2aEBDFQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/js-yaml": {
|
||||
"version": "4.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz",
|
||||
|
|
@ -3132,6 +3138,11 @@
|
|||
"integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/gar": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/gar/-/gar-1.0.4.tgz",
|
||||
"integrity": "sha512-w4n9cPWyP7aHxKxYHFQMegj7WIAsL/YX/C4Bs5Rr8s1H9M1rNtRWRsw+ovYMkXDQ5S4ZbYHsHAPmevPjPgw44w=="
|
||||
},
|
||||
"node_modules/get-caller-file": {
|
||||
"version": "2.0.5",
|
||||
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
||||
|
|
@ -3141,6 +3152,18 @@
|
|||
"node": "6.* || 8.* || >= 10.*"
|
||||
}
|
||||
},
|
||||
"node_modules/get-folder-size": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/get-folder-size/-/get-folder-size-2.0.1.tgz",
|
||||
"integrity": "sha512-+CEb+GDCM7tkOS2wdMKTn9vU7DgnKUTuDlehkNJKNSovdCOVxs14OfKCk4cvSaR3za4gj+OBdl9opPN9xrJ0zA==",
|
||||
"dependencies": {
|
||||
"gar": "^1.0.4",
|
||||
"tiny-each-async": "2.0.3"
|
||||
},
|
||||
"bin": {
|
||||
"get-folder-size": "bin/get-folder-size"
|
||||
}
|
||||
},
|
||||
"node_modules/get-intrinsic": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
|
||||
|
|
@ -4043,8 +4066,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/minimatch": {
|
||||
"version": "3.0.4",
|
||||
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
},
|
||||
|
|
@ -5417,6 +5441,11 @@
|
|||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/tiny-each-async": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz",
|
||||
"integrity": "sha512-5ROII7nElnAirvFn8g7H7MtpfV1daMcyfTGQwsn/x2VtyV+VPiO5CjReCJtWLvoKTDEDmZocf3cNPraiMnBXLA=="
|
||||
},
|
||||
"node_modules/tmp": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
|
||||
|
|
|
|||
node_modules/@types/get-folder-size/LICENSE (generated, vendored, new file)
@@ -0,0 +1,21 @@
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
node_modules/@types/get-folder-size/README.md (generated, vendored, new file)
@@ -0,0 +1,16 @@
# Installation
|
||||
> `npm install --save @types/get-folder-size`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for get-folder-size (https://github.com/alessioalex/get-folder-size).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/get-folder-size
|
||||
|
||||
Additional Details
|
||||
* Last updated: Fri, 06 Jul 2018 00:07:02 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by Mariusz Szczepańczyk <https://github.com/mszczepanczyk>.
|
||||
node_modules/@types/get-folder-size/index.d.ts (generated, vendored, new file)
@@ -0,0 +1,9 @@
// Type definitions for get-folder-size 2.0
|
||||
// Project: https://github.com/alessioalex/get-folder-size
|
||||
// Definitions by: Mariusz Szczepańczyk <https://github.com/mszczepanczyk>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export = getFolderSize;
|
||||
|
||||
declare function getFolderSize(folder: string, callback: (err: Error | null, size: number) => void): void;
|
||||
declare function getFolderSize(folder: string, regexIgnorePattern: RegExp, callback: (err: Error | null, size: number) => void): void;
|
||||
node_modules/@types/get-folder-size/package.json (generated, vendored, new file)
@@ -0,0 +1,22 @@
{
|
||||
"name": "@types/get-folder-size",
|
||||
"version": "2.0.0",
|
||||
"description": "TypeScript definitions for get-folder-size",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Mariusz Szczepańczyk",
|
||||
"url": "https://github.com/mszczepanczyk",
|
||||
"githubUsername": "mszczepanczyk"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "40dcdde6847f0332ef276f2d429f348945a3e481fb5cda7a3a89caf204b6f4f0",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
node_modules/gar/LICENSE (generated, vendored, new executable file)
@@ -0,0 +1,21 @@
MIT License
|
||||
|
||||
Copyright (c) 2018 Ethan Davis
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
node_modules/gar/README.md (generated, vendored, new executable file)
@@ -0,0 +1,48 @@
# gar
|
||||
> The lightweight Node arguments parser
|
||||
|
||||
[GitHub](https://github.com/ethanent/gar) | [NPM](https://www.npmjs.com/package/gar)
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
npm i gar
|
||||
```
|
||||
|
||||
## Use
|
||||
|
||||

|
||||
|
||||
```javascript
|
||||
const args = require('gar')(process.argv.slice(2))
|
||||
|
||||
console.log(args)
|
||||
```
|
||||
|
||||
So for: `-h hey --toggle -ac --hey=hi -spaced "hey there" -num 1 lone`
|
||||
|
||||
```json
|
||||
{
|
||||
"h": "hey",
|
||||
"toggle": true,
|
||||
"a": true,
|
||||
"c": true,
|
||||
"hey": "hi",
|
||||
"spaced": "hey there",
|
||||
"num": 1,
|
||||
"_": ["lone"]
|
||||
}
|
||||
```
|
||||
|
||||
## Why use gar?
|
||||
|
||||
gar is way more lightweight than other argument parsing packages.
|
||||
|
||||
Here's a size comparison table:
|
||||
|
||||
Package | Size
|
||||
--- | ---
|
||||
optimist | [](https://packagephobia.now.sh/result?p=optimist)
|
||||
minimist | [](https://packagephobia.now.sh/result?p=minimist)
|
||||
args-parser | [](https://packagephobia.now.sh/result?p=args-parser)
|
||||
gar | [](https://packagephobia.now.sh/result?p=gar)
|
||||
node_modules/gar/index.js (generated, vendored, new executable file)
@@ -0,0 +1,36 @@
module.exports = (sargs) => {
|
||||
let props = {}
|
||||
let lones = []
|
||||
|
||||
const convertIfApplicable = (value) => (isNaN(value) ? (value.toString().toLowerCase() === 'true' ? true : (value.toString().toLowerCase() === 'false' ? false : value)) : Number(value))
|
||||
const removeStartHyphens = (value) => value.replace(/^\-+/g, '')
|
||||
|
||||
for (let i = 0; i < sargs.length; i++) {
|
||||
const equalsIndex = sargs[i].indexOf('=')
|
||||
const isNextRefProp = sargs[i].charAt(0) === '-' && sargs.length - 1 >= i + 1 && sargs[i + 1].indexOf('=') === -1 && sargs[i + 1].charAt(0) !== '-'
|
||||
const argName = equalsIndex === -1 ? removeStartHyphens(sargs[i]) : removeStartHyphens(sargs[i].slice(0, equalsIndex))
|
||||
|
||||
if (equalsIndex !== -1) {
|
||||
props[argName] = convertIfApplicable(sargs[i].slice(equalsIndex + 1))
|
||||
}
|
||||
else if (isNextRefProp) {
|
||||
props[argName] = convertIfApplicable(sargs[i + 1])
|
||||
i++
|
||||
} else if (sargs[i].charAt(0) === '-') {
|
||||
if (sargs[i].charAt(1) === '-') {
|
||||
props[argName] = true
|
||||
}
|
||||
else {
|
||||
for (let b = 0; b < argName.length; b++) {
|
||||
props[argName.charAt(b)] = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
lones.push(convertIfApplicable(argName))
|
||||
}
|
||||
}
|
||||
|
||||
return Object.assign(props, {
|
||||
'_': lones
|
||||
})
|
||||
}
|
||||
node_modules/gar/package.json (generated, vendored, new executable file)
@@ -0,0 +1,39 @@
{
|
||||
"name": "gar",
|
||||
"version": "1.0.4",
|
||||
"description": "The lightweight Node arguments parser",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Tested before deployment.\" && exit 0",
|
||||
"test-dev": "npm install && node test.js",
|
||||
"prepublishOnly": "npm run test-dev"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ethanent/gar.git"
|
||||
},
|
||||
"keywords": [
|
||||
"argument",
|
||||
"args",
|
||||
"argv",
|
||||
"parse",
|
||||
"cli",
|
||||
"command-line",
|
||||
"parser",
|
||||
"command",
|
||||
"lightweight"
|
||||
],
|
||||
"author": "Ethan Davis",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ethanent/gar/issues"
|
||||
},
|
||||
"homepage": "https://github.com/ethanent/gar#readme",
|
||||
"devDependencies": {
|
||||
"whew": "^1.1.3"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"LICENSE"
|
||||
]
|
||||
}
|
||||
node_modules/get-folder-size/README.md (generated, vendored, new file)
@@ -0,0 +1,37 @@
# get-folder-size
|
||||
|
||||
Get the size of a folder by iterating through its sub-files and folders.
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
getFolderSize(folder, [regexIgnorePattern], callback)
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
const getSize = require('get-folder-size');
|
||||
|
||||
getSize(myFolder, (err, size) => {
|
||||
if (err) { throw err; }
|
||||
|
||||
console.log(size + ' bytes');
|
||||
console.log((size / 1024 / 1024).toFixed(2) + ' MB');
|
||||
});
|
||||
```
|
||||
|
||||
## CLI tool
|
||||
|
||||
```bash
|
||||
npm i -g get-folder-size
|
||||
get-folder-size --folder=/my/folder --ignore=node_modules
|
||||
```
|
||||
|
||||
## Size vs Size on disk
|
||||
|
||||
[This module calculates the actual file size, and not the size on disk.](https://web.archive.org/web/20140712235443/https://stackoverflow.com/questions/15470787/please-help-me-understand-size-vs-size-on-disk)
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
node_modules/get-folder-size/bin/get-folder-size (generated, vendored, new executable file)
@@ -0,0 +1,22 @@
#!/usr/bin/env node
|
||||
|
||||
const getSize = require('../');
|
||||
const argv = require('gar')(process.argv.slice(2));
|
||||
const path = require('path');
|
||||
// --folder or -f or last argument passed
|
||||
const folder = argv.folder || argv.f || argv._[argv._.length - 1];
|
||||
|
||||
if (!folder) {
|
||||
console.error('missing folder argument');
|
||||
console.error('\n Usage:\n');
|
||||
console.error('get-folder-size --folder=/home/alex/www');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const ignore = argv.ignore ? new RegExp(argv.ignore) : null;
|
||||
|
||||
getSize(path.resolve(folder), ignore, (err, bytes) => {
|
||||
if (err) { throw err; }
|
||||
|
||||
console.log((bytes / 1024 / 1024).toFixed(2) + ' Mb');
|
||||
});
|
||||
node_modules/get-folder-size/example.js (generated, vendored, new file)
@@ -0,0 +1,15 @@
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const getSize = require('./');
|
||||
|
||||
if (!process.env.FOLDER) {
|
||||
throw new Error('FOLDER env var needed');
|
||||
}
|
||||
|
||||
getSize(path.resolve(process.env.FOLDER), (err, size) => {
|
||||
if (err) { throw err; }
|
||||
|
||||
console.log(size + ' bytes');
|
||||
console.log((size / 1024 / 1024).toFixed(2) + ' Mb');
|
||||
});
|
||||
node_modules/get-folder-size/index.js (generated, vendored, new file)
@@ -0,0 +1,66 @@
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const eachAsync = require('tiny-each-async');
|
||||
|
||||
function readSizeRecursive(seen, item, ignoreRegEx, callback) {
|
||||
let cb;
|
||||
let ignoreRegExp;
|
||||
|
||||
if (!callback) {
|
||||
cb = ignoreRegEx;
|
||||
ignoreRegExp = null;
|
||||
} else {
|
||||
cb = callback;
|
||||
ignoreRegExp = ignoreRegEx;
|
||||
}
|
||||
|
||||
fs.lstat(item, function lstat(e, stats) {
|
||||
let total = !e ? (stats.size || 0) : 0;
|
||||
|
||||
if (stats) {
|
||||
if (seen.has(stats.ino)) { return cb(null, 0); }
|
||||
|
||||
seen.add(stats.ino);
|
||||
}
|
||||
|
||||
if (!e && stats.isDirectory()) {
|
||||
fs.readdir(item, (err, list) => {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
eachAsync(
|
||||
list,
|
||||
5000,
|
||||
(dirItem, next) => {
|
||||
readSizeRecursive(
|
||||
seen,
|
||||
path.join(item, dirItem),
|
||||
ignoreRegExp,
|
||||
(error, size) => {
|
||||
if (!error) { total += size; }
|
||||
|
||||
next(error);
|
||||
}
|
||||
);
|
||||
},
|
||||
(finalErr) => {
|
||||
cb(finalErr, total);
|
||||
}
|
||||
);
|
||||
});
|
||||
} else {
|
||||
if (ignoreRegExp && ignoreRegExp.test(item)) {
|
||||
total = 0;
|
||||
}
|
||||
|
||||
cb(e, total);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = (...args) => {
|
||||
args.unshift(new Set());
|
||||
|
||||
return readSizeRecursive(...args);
|
||||
};
|
||||
node_modules/get-folder-size/package.json (generated, vendored, new file)
@@ -0,0 +1,33 @@
{
|
||||
"name": "get-folder-size",
|
||||
"version": "2.0.1",
|
||||
"description": "Get the size of a folder by recursively iterating through all its sub(files && folders).",
|
||||
"main": "index.js",
|
||||
"bin": {
|
||||
"get-folder-size": "bin/get-folder-size"
|
||||
},
|
||||
"dependencies": {
|
||||
"gar": "^1.0.4",
|
||||
"tiny-each-async": "2.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"husky": "^0.10.1",
|
||||
"mocha": "^5.2.0",
|
||||
"proxyquire": "^1.7.3",
|
||||
"should": "^7.1.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha",
|
||||
"precommit": "npm test"
|
||||
},
|
||||
"keywords": [
|
||||
"folder",
|
||||
"size"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/alessioalex/get-folder-size.git"
|
||||
},
|
||||
"author": "Alexandru Vladutu <alexandru.vladutu@gmail.com>",
|
||||
"license": "MIT"
|
||||
}
|
||||
node_modules/get-folder-size/test.js (generated, vendored, new file)
@@ -0,0 +1,102 @@
'use strict';
|
||||
|
||||
const proxyquire = require('proxyquire');
|
||||
const path = require('path');
|
||||
require('should');
|
||||
|
||||
let files = [
|
||||
'/root',
|
||||
'/root/folder',
|
||||
'/root/folder/subfolder',
|
||||
'/root/folder/subfolder/file4',
|
||||
'/root/folder/file3',
|
||||
'/root/file1',
|
||||
'/root/file2'
|
||||
];
|
||||
|
||||
const sizes = {
|
||||
'/root/folder/subfolder/file4': 4,
|
||||
'/root/folder/file3': 3,
|
||||
'/root/file1': 1,
|
||||
'/root/file2': 2
|
||||
};
|
||||
|
||||
let inos = {};
|
||||
|
||||
Object.keys(sizes).forEach(file => {
|
||||
const file2 = file.replace(/\//g, path.sep);
|
||||
|
||||
sizes[file2] = sizes[file];
|
||||
});
|
||||
|
||||
files = files.map(file => file.replace(/\//g, path.sep));
|
||||
|
||||
let inoCounter = 0;
|
||||
|
||||
const fs = {
|
||||
lstat: (item, cb) => {
|
||||
const stats = {
|
||||
size: sizes[item],
|
||||
isDirectory: () => {
|
||||
return ((item === files[0]) || /folder$/.test(item));
|
||||
},
|
||||
ino: inos[item] || ++inoCounter
|
||||
};
|
||||
|
||||
setImmediate(() => cb(null, stats));
|
||||
},
|
||||
readdir: (item, cb) => {
|
||||
setImmediate(() => {
|
||||
const list = files.filter(file => {
|
||||
return ((file !== item) && (file.indexOf(item) !== -1));
|
||||
}).map(file => {
|
||||
return file.replace(item, '');
|
||||
}).filter(it => {
|
||||
return (it.lastIndexOf(path.sep) <= 0);
|
||||
});
|
||||
|
||||
cb(null, list);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
describe('getSize', () => {
|
||||
let getSize;
|
||||
|
||||
before(() => {
|
||||
getSize = proxyquire.load('./index', {
|
||||
fs: fs
|
||||
});
|
||||
});
|
||||
|
||||
it('should get the size of the folder', (done) => {
|
||||
getSize(files[0], (err, total) => {
|
||||
total.should.eql(10);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore files', (done) => {
|
||||
getSize(files[0], /(file1|file2)/, (err, total) => {
|
||||
total.should.eql(7);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should not count hardlinks twice', (done) => {
|
||||
inos['/root/file1'] = 222;
|
||||
inos['/root/file2'] = inos['/root/file1'];
|
||||
|
||||
getSize(files[0], (err, total) => {
|
||||
total.should.eql(8);
|
||||
|
||||
delete inos['/root/file1'];
|
||||
delete inos['/root/file2'];
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
node_modules/minimatch/README.md (generated, vendored, 23 changes)
@ -2,7 +2,7 @@
|
|||
|
||||
A minimal matching utility.
|
||||
|
||||
[](http://travis-ci.org/isaacs/minimatch)
|
||||
[](http://travis-ci.org/isaacs/minimatch)
|
||||
|
||||
|
||||
This is the matching library used internally by npm.
|
||||
|
|
@ -171,6 +171,27 @@ Suppress the behavior of treating a leading `!` character as negation.
|
|||
Returns from negate expressions the same as if they were not negated.
|
||||
(Ie, true on a hit, false on a miss.)
|
||||
|
||||
### partial
|
||||
|
||||
Compare a partial path to a pattern. As long as the parts of the path that
|
||||
are present are not contradicted by the pattern, it will be treated as a
|
||||
match. This is useful in applications where you're walking through a
|
||||
folder structure, and don't yet have the full path, but want to ensure that
|
||||
you do not walk down paths that can never be a match.
|
||||
|
||||
For example,
|
||||
|
||||
```js
|
||||
minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d
|
||||
minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d
|
||||
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
|
||||
```
|
||||
|
||||
### allowWindowsEscape
|
||||
|
||||
Windows path separator `\` is by default converted to `/`, which
|
||||
prohibits the usage of `\` as a escape character. This flag skips that
|
||||
behavior and allows using the escape character.
|
||||
|
||||
## Comparisons to other fnmatch/glob implementations
|
||||
|
||||
|
|
|
|||
node_modules/minimatch/minimatch.js (generated, vendored, 162 changes)
@ -1,10 +1,10 @@
|
|||
module.exports = minimatch
|
||||
minimatch.Minimatch = Minimatch
|
||||
|
||||
var path = { sep: '/' }
|
||||
try {
|
||||
path = require('path')
|
||||
} catch (er) {}
|
||||
var path = (function () { try { return require('path') } catch (e) {}}()) || {
|
||||
sep: '/'
|
||||
}
|
||||
minimatch.sep = path.sep
|
||||
|
||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||
var expand = require('brace-expansion')
|
||||
|
|
@ -56,43 +56,64 @@ function filter (pattern, options) {
|
|||
}
|
||||
|
||||
function ext (a, b) {
|
||||
a = a || {}
|
||||
b = b || {}
|
||||
var t = {}
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
Object.keys(a).forEach(function (k) {
|
||||
t[k] = a[k]
|
||||
})
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
return t
|
||||
}
|
||||
|
||||
minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return minimatch
|
||||
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
||||
return minimatch
|
||||
}
|
||||
|
||||
var orig = minimatch
|
||||
|
||||
var m = function minimatch (p, pattern, options) {
|
||||
return orig.minimatch(p, pattern, ext(def, options))
|
||||
return orig(p, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.Minimatch = function Minimatch (pattern, options) {
|
||||
return new orig.Minimatch(pattern, ext(def, options))
|
||||
}
|
||||
m.Minimatch.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options)).Minimatch
|
||||
}
|
||||
|
||||
m.filter = function filter (pattern, options) {
|
||||
return orig.filter(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options))
|
||||
}
|
||||
|
||||
m.makeRe = function makeRe (pattern, options) {
|
||||
return orig.makeRe(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.braceExpand = function braceExpand (pattern, options) {
|
||||
return orig.braceExpand(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.match = function (list, pattern, options) {
|
||||
return orig.match(list, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
Minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return Minimatch
|
||||
return minimatch.defaults(def).Minimatch
|
||||
}
|
||||
|
||||
function minimatch (p, pattern, options) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
|
|
@ -101,9 +122,6 @@ function minimatch (p, pattern, options) {
|
|||
return false
|
||||
}
|
||||
|
||||
// "" only matches ""
|
||||
if (pattern.trim() === '') return p === ''
|
||||
|
||||
return new Minimatch(pattern, options).match(p)
|
||||
}
|
||||
|
||||
|
|
@ -112,15 +130,14 @@ function Minimatch (pattern, options) {
|
|||
return new Minimatch(pattern, options)
|
||||
}
|
||||
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
pattern = pattern.trim()
|
||||
|
||||
// windows support: need to use /, not \
|
||||
if (path.sep !== '/') {
|
||||
if (!options.allowWindowsEscape && path.sep !== '/') {
|
||||
pattern = pattern.split(path.sep).join('/')
|
||||
}
|
||||
|
||||
|
|
@ -131,6 +148,7 @@ function Minimatch (pattern, options) {
|
|||
this.negate = false
|
||||
this.comment = false
|
||||
this.empty = false
|
||||
this.partial = !!options.partial
|
||||
|
||||
// make the set of regexps etc.
|
||||
this.make()
|
||||
|
|
@ -140,9 +158,6 @@ Minimatch.prototype.debug = function () {}
|
|||
|
||||
Minimatch.prototype.make = make
|
||||
function make () {
|
||||
// don't do it more than once.
|
||||
if (this._made) return
|
||||
|
||||
var pattern = this.pattern
|
||||
var options = this.options
|
||||
|
||||
|
|
@ -162,7 +177,7 @@ function make () {
|
|||
// step 2: expand braces
|
||||
var set = this.globSet = this.braceExpand()
|
||||
|
||||
if (options.debug) this.debug = console.error
|
||||
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
|
|
@ -242,12 +257,11 @@ function braceExpand (pattern, options) {
|
|||
pattern = typeof pattern === 'undefined'
|
||||
? this.pattern : pattern
|
||||
|
||||
if (typeof pattern === 'undefined') {
|
||||
throw new TypeError('undefined pattern')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (options.nobrace ||
|
||||
!pattern.match(/\{.*\}/)) {
|
||||
// Thanks to Yeting Li <https://github.com/yetingli> for
|
||||
// improving this regexp to avoid a ReDOS vulnerability.
|
||||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||||
// shortcut. no need to expand.
|
||||
return [pattern]
|
||||
}
|
||||
|
|
@ -255,6 +269,17 @@ function braceExpand (pattern, options) {
|
|||
return expand(pattern)
|
||||
}
|
||||
|
||||
var MAX_PATTERN_LENGTH = 1024 * 64
|
||||
var assertValidPattern = function (pattern) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('invalid pattern')
|
||||
}
|
||||
|
||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
}
|
||||
|
||||
// parse a component of the expanded set.
|
||||
// At this point, no pattern may contain "/" in it
|
||||
// so we're going to return a 2d array, where each entry is the full
|
||||
|
|
@ -269,14 +294,17 @@ function braceExpand (pattern, options) {
|
|||
Minimatch.prototype.parse = parse
|
||||
var SUBPARSE = {}
|
||||
function parse (pattern, isSub) {
|
||||
if (pattern.length > 1024 * 64) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
var options = this.options
|
||||
|
||||
// shortcuts
|
||||
if (!options.noglobstar && pattern === '**') return GLOBSTAR
|
||||
if (pattern === '**') {
|
||||
if (!options.noglobstar)
|
||||
return GLOBSTAR
|
||||
else
|
||||
pattern = '*'
|
||||
}
|
||||
if (pattern === '') return ''
|
||||
|
||||
var re = ''
|
||||
|
|
@ -332,10 +360,12 @@ function parse (pattern, isSub) {
|
|||
}
|
||||
|
||||
switch (c) {
|
||||
case '/':
|
||||
/* istanbul ignore next */
|
||||
case '/': {
|
||||
// completely not allowed, even escaped.
|
||||
// Should already be path-split by now.
|
||||
return false
|
||||
}
|
||||
|
||||
case '\\':
|
||||
clearStateChar()
|
||||
|
|
@ -454,25 +484,23 @@ function parse (pattern, isSub) {
|
|||
|
||||
// handle the case where we left a class open.
|
||||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||||
if (inClass) {
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
|
||||
// finish up the class.
|
||||
|
|
@ -556,9 +584,7 @@ function parse (pattern, isSub) {
|
|||
// something that could conceivably capture a dot
|
||||
var addPatternStart = false
|
||||
switch (re.charAt(0)) {
|
||||
case '.':
|
||||
case '[':
|
||||
case '(': addPatternStart = true
|
||||
case '[': case '.': case '(': addPatternStart = true
|
||||
}
|
||||
|
||||
// Hack to work around lack of negative lookbehind in JS
|
||||
|
|
@ -620,7 +646,7 @@ function parse (pattern, isSub) {
|
|||
var flags = options.nocase ? 'i' : ''
|
||||
try {
|
||||
var regExp = new RegExp('^' + re + '$', flags)
|
||||
} catch (er) {
|
||||
} catch (er) /* istanbul ignore next - should be impossible */ {
|
||||
// If it was an invalid regular expression, then it can't match
|
||||
// anything. This trick looks for a character after the end of
|
||||
// the string, which is of course impossible, except in multi-line
|
||||
|
|
@ -678,7 +704,7 @@ function makeRe () {
|
|||
|
||||
try {
|
||||
this.regexp = new RegExp(re, flags)
|
||||
} catch (ex) {
|
||||
} catch (ex) /* istanbul ignore next - should be impossible */ {
|
||||
this.regexp = false
|
||||
}
|
||||
return this.regexp
|
||||
|
|
@ -696,8 +722,8 @@ minimatch.match = function (list, pattern, options) {
|
|||
return list
|
||||
}
|
||||
|
||||
Minimatch.prototype.match = match
|
||||
function match (f, partial) {
|
||||
Minimatch.prototype.match = function match (f, partial) {
|
||||
if (typeof partial === 'undefined') partial = this.partial
|
||||
this.debug('match', f, this.pattern)
|
||||
// short-circuit in the case of busted things.
|
||||
// comments, etc.
|
||||
|
|
@ -779,6 +805,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||
|
||||
// should be impossible.
|
||||
// some invalid regexp stuff in the set.
|
||||
/* istanbul ignore if */
|
||||
if (p === false) return false
|
||||
|
||||
if (p === GLOBSTAR) {
|
||||
|
|
@ -852,6 +879,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||
// no match was found.
|
||||
// However, in partial mode, we can't say this is necessarily over.
|
||||
// If there's more *pattern* left, then
|
||||
/* istanbul ignore if */
|
||||
if (partial) {
|
||||
// ran out of file
|
||||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||||
|
|
@ -865,11 +893,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||
// patterns with magic have been turned into regexps.
|
||||
var hit
|
||||
if (typeof p === 'string') {
|
||||
if (options.nocase) {
|
||||
hit = f.toLowerCase() === p.toLowerCase()
|
||||
} else {
|
||||
hit = f === p
|
||||
}
|
||||
hit = f === p
|
||||
this.debug('string match', p, f, hit)
|
||||
} else {
|
||||
hit = f.match(p)
|
||||
|
|
@ -900,16 +924,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
|||
// this is ok if we're doing the match as part of
|
||||
// a glob fs traversal.
|
||||
return partial
|
||||
} else if (pi === pl) {
|
||||
} else /* istanbul ignore else */ if (pi === pl) {
|
||||
// ran out of pattern, still have file left.
|
||||
// this is only acceptable if we're on the very last
|
||||
// empty segment of a file with a trailing slash.
|
||||
// a/* should match a/b/
|
||||
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
|
||||
return emptyFileEnd
|
||||
return (fi === fl - 1) && (file[fi] === '')
|
||||
}
|
||||
|
||||
// should be unreachable.
|
||||
/* istanbul ignore next */
|
||||
throw new Error('wtf?')
|
||||
}
|
||||
|
||||
|
|
|
|||
node_modules/minimatch/package.json (generated, vendored, 9 changes)
@ -2,14 +2,17 @@
|
|||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
|
||||
"name": "minimatch",
|
||||
"description": "a glob matcher in javascript",
|
||||
"version": "3.0.4",
|
||||
"version": "3.1.2",
|
||||
"publishConfig": {
|
||||
"tag": "v3-legacy"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/isaacs/minimatch.git"
|
||||
},
|
||||
"main": "minimatch.js",
|
||||
"scripts": {
|
||||
"test": "tap test/*.js --cov",
|
||||
"test": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --all; git push origin --tags"
|
||||
|
|
@ -21,7 +24,7 @@
|
|||
"brace-expansion": "^1.1.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tap": "^10.3.2"
|
||||
"tap": "^15.1.6"
|
||||
},
|
||||
"license": "ISC",
|
||||
"files": [
|
||||
|
|
|
|||
2 node_modules/tiny-each-async/.npmignore generated vendored Normal file
@@ -0,0 +1,2 @@
bench/
examples/
9 node_modules/tiny-each-async/.travis.yml generated vendored Normal file
@@ -0,0 +1,9 @@
language: node_js
node_js:
- 'stable'
- '0.12'
- '0.10'
sudo: false
cache:
directories:
- node_modules
52 node_modules/tiny-each-async/README.md generated vendored Normal file
@@ -0,0 +1,52 @@
# tiny-each-async

Asynchronous iterator function similar to (and inspired by) [async.each](https://github.com/caolan/async#eacharr-iterator-callback), with support for concurrency limit and item index.

[](http://travis-ci.org/alessioalex/tiny-each-async)

## Usage

### each(array, [limit], iterator, [callback])

Arguments:

- array - An array of items to iterate through.
- [limit] - An (optional) integer for determining how many `iterator` functions should be run in parallel.
- iterator(item, [index], callback) - A function to be applied to each item in the array. When it has finished processing the item then the `callback` function should be called (in case of a failure with the `error` argument, otherwise none).
- callback(err) - An optional callback function that gets called when either all `iterator` functions have finished or one of them has returned an error.

### Example

```js
var eachAsync = require('tiny-each-async');
var timeouts = [300, 100, 2000];

eachAsync(['file1', 'file2', 'file3'], function(item, index, next) {
setTimeout(function() {
console.log(item, index, timeouts[index]);
next();
}, timeouts[index]);
}, function(err) {
return err ? console.error(err.stack) : console.log('all done');
});
```

For more examples checkout the [/examples](/examples) folder.

## FAQ

- Why the name?

Other possible names were already taken, and the actual source code is tiny.

- Why create another async library?

Because doing your own thing is fun.

- What if my iterator function is sync, but I want it && the callback to be async?

Then you might want to use [dezalgo](https://github.com/npm/dezalgo).

## License

[MIT](http://alessioalex.mit-license.org/)
59 node_modules/tiny-each-async/index.js generated vendored Normal file
@@ -0,0 +1,59 @@
/* eslint-disable no-use-before-define */
'use strict';

module.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) {
var pending = 0;
var index = 0;
var lastIndex = arr.length - 1;
var called = false;
var limit;
var callback;
var iterate;

if (typeof parallelLimit === 'number') {
limit = parallelLimit;
iterate = iteratorFn;
callback = cb || function noop() {};
} else {
iterate = parallelLimit;
callback = iteratorFn || function noop() {};
limit = arr.length;
}

if (!arr.length) { return callback(); }

var iteratorLength = iterate.length;

var shouldCallNextIterator = function shouldCallNextIterator() {
return (!called && (pending < limit) && (index < lastIndex));
};

var iteratorCallback = function iteratorCallback(err) {
if (called) { return; }

pending--;

if (err || (index === lastIndex && !pending)) {
called = true;

callback(err);
} else if (shouldCallNextIterator()) {
processIterator(++index);
}
};

var processIterator = function processIterator() {
pending++;

var args = (iteratorLength === 2) ? [arr[index], iteratorCallback]
: [arr[index], index, iteratorCallback];

iterate.apply(null, args);

if (shouldCallNextIterator()) {
processIterator(++index);
}
};

processIterator();
};
38 node_modules/tiny-each-async/package.json generated vendored Normal file
@@ -0,0 +1,38 @@
{
"name": "tiny-each-async",
"version": "2.0.3",
"description": "Asynchronous iterator function for parallel processing.",
"main": "index.js",
"keywords": [
"each",
"async",
"asynchronous",
"iteration",
"iterate",
"loop",
"foreach",
"parallel",
"concurrent",
"array",
"flow",
"control flow"
],
"dependencies": {},
"devDependencies": {
"alessioalex-standard": "^1.0.0",
"async": "^1.5.0",
"husky": "^0.10.1",
"lolex": "^1.3.2",
"matcha": "^0.6.0",
"tape": "^4.2.2"
},
"scripts": {
"bench": "matcha bench/*.js",
"test": "tape test.js",
"lint": "alessioalex-standard",
"precommit": "npm run lint && npm test"
},
"author": "Alexandru Vladutu <alexandru.vladutu@gmail.com>",
"license": "MIT",
"repository": "alessioalex/tiny-each-async"
}
95 node_modules/tiny-each-async/test.js generated vendored Normal file
@@ -0,0 +1,95 @@
/* eslint-disable no-console, func-names */
'use strict';

var it = require('tape');
var eachAsync = require('./');
var lolex = require('lolex');

it('should call back even if the array is empty', function(t) {
eachAsync([], function(item, next) {
next();
}, function() {
t.end();
});
});

it('should execute the final callback once all individual tasks are finished', function(t) {
var counter = 0;

eachAsync([1, 2, 3], function(item, next) {
counter++;
next();
}, function() {
t.equal(counter, 3);
t.end();
});
});

it('should provide index as an argument for the iterator if needed', function(t) {
var items = [11, 22, 33];

eachAsync(items, function(item, i, next) {
t.equal(item, items[i]);

next();
}, function() {
t.end();
});
});

it('should treat iterator index as an optional param', function(t) {
eachAsync([1, 2, 3], function(item, next) {
next();
}, function() {
t.end();
});
});

it('should treat limit as an optional param', function(t) {
eachAsync([1, 2, 3], function(item, next) {
next();
}, function() {
eachAsync([1, 2, 3], 2, function(item, next) {
next();
}, function() {
t.end();
});
});
});

it('should return early in case there\'s an error', function(t) {
var error = new Error('test');

eachAsync([1, 2, 3], function(item, next) {
if (item === 2) { return next(error); }

t.ok(item === 1);

next();
}, function(err) {
t.equal(err, error);
t.end();
});
});

it('should limit the concurrency', function(t) {
var clock = lolex.install();
var items = [];

eachAsync([1, 2, 3, 4, 5], 2, function(item, next) {
setTimeout(function() {
items.push(item);
next();
}, 1000);
}, function() {
clock.uninstall();
t.end();
});

clock.tick(1001);
t.deepEqual([1, 2], items);
clock.tick(1001);
t.deepEqual([1, 2, 3, 4], items);
clock.tick(1000);
t.deepEqual([1, 2, 3, 4, 5], items);
});
65 package-lock.json generated
@@ -26,6 +26,7 @@
"fast-deep-equal": "^3.1.3",
"file-url": "^3.0.0",
"fs": "0.0.1-security",
"get-folder-size": "^2.0.1",
"github-linguist": "^2.4.4",
"glob": "^8.0.1",
"js-yaml": "^4.1.0",
@@ -41,6 +42,7 @@
"devDependencies": {
"@ava/typescript": "3.0.1",
"@types/adm-zip": "^0.5.0",
"@types/get-folder-size": "^2.0.0",
"@types/js-yaml": "^4.0.5",
"@types/long": "5.0.0",
"@types/node": "16.11.22",
@@ -782,6 +784,12 @@
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
"dev": true
},
"node_modules/@types/get-folder-size": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@types/get-folder-size/-/get-folder-size-2.0.0.tgz",
"integrity": "sha512-6VKKrDB20E/6ovi2Pfpy9Pcz8Me1ue/tReaZrwrz9mfVdsr6WAMiDZ+F1oAAcss4U5n2k673i1leDIx2aEBDFQ==",
"dev": true
},
"node_modules/@types/js-yaml": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz",
@@ -3188,6 +3196,11 @@
"integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
"dev": true
},
"node_modules/gar": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/gar/-/gar-1.0.4.tgz",
"integrity": "sha512-w4n9cPWyP7aHxKxYHFQMegj7WIAsL/YX/C4Bs5Rr8s1H9M1rNtRWRsw+ovYMkXDQ5S4ZbYHsHAPmevPjPgw44w=="
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@@ -3197,6 +3210,18 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-folder-size": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/get-folder-size/-/get-folder-size-2.0.1.tgz",
"integrity": "sha512-+CEb+GDCM7tkOS2wdMKTn9vU7DgnKUTuDlehkNJKNSovdCOVxs14OfKCk4cvSaR3za4gj+OBdl9opPN9xrJ0zA==",
"dependencies": {
"gar": "^1.0.4",
"tiny-each-async": "2.0.3"
},
"bin": {
"get-folder-size": "bin/get-folder-size"
}
},
"node_modules/get-intrinsic": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
@@ -4099,8 +4124,9 @@
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -5473,6 +5499,11 @@
"node": ">=4"
}
},
"node_modules/tiny-each-async": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz",
"integrity": "sha512-5ROII7nElnAirvFn8g7H7MtpfV1daMcyfTGQwsn/x2VtyV+VPiO5CjReCJtWLvoKTDEDmZocf3cNPraiMnBXLA=="
},
"node_modules/tmp": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
@@ -6561,6 +6592,12 @@
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
"dev": true
},
"@types/get-folder-size": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@types/get-folder-size/-/get-folder-size-2.0.0.tgz",
"integrity": "sha512-6VKKrDB20E/6ovi2Pfpy9Pcz8Me1ue/tReaZrwrz9mfVdsr6WAMiDZ+F1oAAcss4U5n2k673i1leDIx2aEBDFQ==",
"dev": true
},
"@types/js-yaml": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz",
@@ -8263,12 +8300,26 @@
"integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
"dev": true
},
"gar": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/gar/-/gar-1.0.4.tgz",
"integrity": "sha512-w4n9cPWyP7aHxKxYHFQMegj7WIAsL/YX/C4Bs5Rr8s1H9M1rNtRWRsw+ovYMkXDQ5S4ZbYHsHAPmevPjPgw44w=="
},
"get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"dev": true
},
"get-folder-size": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/get-folder-size/-/get-folder-size-2.0.1.tgz",
"integrity": "sha512-+CEb+GDCM7tkOS2wdMKTn9vU7DgnKUTuDlehkNJKNSovdCOVxs14OfKCk4cvSaR3za4gj+OBdl9opPN9xrJ0zA==",
"requires": {
"gar": "^1.0.4",
"tiny-each-async": "2.0.3"
}
},
"get-intrinsic": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
@@ -8926,8 +8977,9 @@
"dev": true
},
"minimatch": {
"version": "3.0.4",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}
@@ -9915,6 +9967,11 @@
"integrity": "sha1-mcW/VZWJZq9tBtg73zgA3IL67F0=",
"dev": true
},
"tiny-each-async": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz",
"integrity": "sha512-5ROII7nElnAirvFn8g7H7MtpfV1daMcyfTGQwsn/x2VtyV+VPiO5CjReCJtWLvoKTDEDmZocf3cNPraiMnBXLA=="
},
"tmp": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
@@ -38,6 +38,7 @@
"fast-deep-equal": "^3.1.3",
"file-url": "^3.0.0",
"fs": "0.0.1-security",
"get-folder-size": "^2.0.1",
"github-linguist": "^2.4.4",
"glob": "^8.0.1",
"js-yaml": "^4.1.0",
@@ -56,6 +57,7 @@
"devDependencies": {
"@ava/typescript": "3.0.1",
"@types/adm-zip": "^0.5.0",
"@types/get-folder-size": "^2.0.0",
"@types/js-yaml": "^4.0.5",
"@types/long": "5.0.0",
"@types/node": "16.11.22",
@@ -30,6 +30,7 @@ test("emptyPaths", async (t) => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -60,6 +61,7 @@ test("nonEmptyPaths", async (t) => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -93,6 +95,7 @@ test("exclude temp dir", async (t) => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -15,7 +15,7 @@ import { uploadDatabases } from "./database-upload";
import { GitHubFeatureFlags } from "./feature-flags";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import { uploadTrapCaches } from "./trap-caching";
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
import * as upload_lib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import * as util from "./util";
@@ -29,13 +29,18 @@ interface AnalysisStatusReport

interface FinishStatusReport
extends actionsUtil.StatusReportBase,
AnalysisStatusReport {}
AnalysisStatusReport {
trap_cache_upload_size_bytes: number;
trap_cache_upload_duration_ms: number;
}

export async function sendStatusReport(
startedAt: Date,
config: Config | undefined,
stats: AnalysisStatusReport | undefined,
error?: Error
error?: Error,
trapCacheUploadTime?: number,
didUploadTrapCaches?: boolean
) {
const status = actionsUtil.getActionsStatus(
error,
@@ -57,6 +62,11 @@ export async function sendStatusReport(
}
: {}),
...(stats || {}),
trap_cache_upload_duration_ms: trapCacheUploadTime || 0,
trap_cache_upload_size_bytes:
config && didUploadTrapCaches
? await getTotalCacheSize(config.trapCaches)
: 0,
};
await actionsUtil.sendStatusReport(statusReport);
}
@@ -66,6 +76,8 @@ async function run() {
let uploadResult: UploadResult | undefined = undefined;
let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined;
let trapCacheUploadTime: number | undefined = undefined;
let didUploadTrapCaches = false;
util.initializeEnvironment(util.Mode.actions, pkg.version);
await util.checkActionVersion(pkg.version);

@@ -163,8 +175,10 @@ async function run() {
await uploadDatabases(repositoryNwo, config, apiDetails, logger);

// Possibly upload the TRAP caches for later re-use
const trapCacheUploadStartTime = Date.now();
const codeql = await getCodeQL(config.codeQLCmd);
await uploadTrapCaches(codeql, config, logger);
trapCacheUploadTime = Date.now() - trapCacheUploadStartTime;
didUploadTrapCaches = await uploadTrapCaches(codeql, config, logger);

// We don't upload results in test mode, so don't wait for processing
if (util.isInTestMode()) {
@@ -188,23 +202,58 @@ async function run() {

if (error instanceof CodeQLAnalysisError) {
const stats = { ...error.queriesStatusReport };
await sendStatusReport(startedAt, config, stats, error);
await sendStatusReport(
startedAt,
config,
stats,
error,
trapCacheUploadTime,
didUploadTrapCaches
);
} else {
await sendStatusReport(startedAt, config, undefined, error);
await sendStatusReport(
startedAt,
config,
undefined,
error,
trapCacheUploadTime,
didUploadTrapCaches
);
}

return;
}

if (runStats && uploadResult) {
await sendStatusReport(startedAt, config, {
...runStats,
...uploadResult.statusReport,
});
await sendStatusReport(
startedAt,
config,
{
...runStats,
...uploadResult.statusReport,
},
undefined,
trapCacheUploadTime,
didUploadTrapCaches
);
} else if (runStats) {
await sendStatusReport(startedAt, config, { ...runStats });
await sendStatusReport(
startedAt,
config,
{ ...runStats },
undefined,
trapCacheUploadTime,
didUploadTrapCaches
);
} else {
await sendStatusReport(startedAt, config, undefined);
await sendStatusReport(
startedAt,
config,
undefined,
undefined,
trapCacheUploadTime,
didUploadTrapCaches
);
}
}
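The analyze-action hunks above time the TRAP cache upload and thread the measurement into the final status report. A condensed sketch of that pattern, assuming `codeql`, `config`, `logger`, `startedAt`, `runStats`, and `uploadResult` are already set up as in `run()`:

```ts
// Sketch only: measure the TRAP cache upload and report it.
let trapCacheUploadTime: number | undefined = undefined;
let didUploadTrapCaches = false;

const trapCacheUploadStartTime = Date.now();
didUploadTrapCaches = await uploadTrapCaches(codeql, config, logger);
trapCacheUploadTime = Date.now() - trapCacheUploadStartTime;

// The measured duration and the upload flag travel with the final report;
// the upload size is only computed when an upload actually happened.
await sendStatusReport(
  startedAt,
  config,
  runStats && uploadResult
    ? { ...runStats, ...uploadResult.statusReport }
    : undefined,
  undefined,
  trapCacheUploadTime,
  didUploadTrapCaches
);
```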
@@ -124,6 +124,7 @@ test("status report fields and search path setting", async (t) => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
@@ -442,6 +443,7 @@ const stubConfig: Config = {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};

for (const options of [
@@ -60,6 +60,7 @@ test.beforeEach(() => {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
});
@@ -360,6 +360,7 @@ test("load non-empty input", async (t) => {
debugDatabaseName: "my-db",
augmentationProperties: configUtils.defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};

const languages = "javascript";
@@ -183,6 +183,11 @@ export interface Config {
* If a key is omitted, then TRAP caching should not be used for that language.
*/
trapCaches: Partial<Record<Language, string>>;

/**
* Time taken to download TRAP caches. Used for status reporting.
*/
trapCacheDownloadTime: number;
}

/**
@@ -1015,6 +1020,13 @@ export async function getDefaultConfig(
);
}

const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(
trapCachingEnabled,
codeQL,
languages,
logger
);

return {
languages,
queries,
@@ -1030,12 +1042,27 @@ export async function getDefaultConfig(
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches: trapCachingEnabled
? await downloadTrapCaches(codeQL, languages, logger)
: {},
trapCaches,
trapCacheDownloadTime,
};
}

async function downloadCacheWithTime(
trapCachingEnabled: boolean,
codeQL: CodeQL,
languages: Language[],
logger: Logger
) {
let trapCaches = {};
let trapCacheDownloadTime = 0;
if (trapCachingEnabled) {
const start = Date.now();
trapCaches = await downloadTrapCaches(codeQL, languages, logger);
trapCacheDownloadTime = Date.now() - start;
}
return { trapCaches, trapCacheDownloadTime };
}

/**
* Load the config from the given file.
*/
@@ -1204,6 +1231,13 @@ async function loadConfig(
}
}

const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(
trapCachingEnabled,
codeQL,
languages,
logger
);

return {
languages,
queries,
@@ -1219,9 +1253,8 @@ async function loadConfig(
debugArtifactName,
debugDatabaseName,
augmentationProperties,
trapCaches: trapCachingEnabled
? await downloadTrapCaches(codeQL, languages, logger)
: {},
trapCaches,
trapCacheDownloadTime,
};
}
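The new `downloadCacheWithTime` helper is a small timing wrapper: it only downloads caches when TRAP caching is enabled, and otherwise reports an empty cache map and a zero duration. A minimal usage sketch, assuming the surrounding variables (`trapCachingEnabled`, `codeQL`, `languages`, `logger`) from the diff context:

```ts
// Sketch: how getDefaultConfig() and loadConfig() consume the helper above.
// With trapCachingEnabled === false the download is skipped entirely, so the
// config records an empty cache map and a duration of 0 ms.
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(
  trapCachingEnabled,
  codeQL,
  languages,
  logger
);

return {
  // ...the remaining Config fields built by the surrounding function...
  trapCaches,
  trapCacheDownloadTime,
};
```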
@@ -58,6 +58,7 @@ function getTestConfig(tmpDir: string): Config {
debugDatabaseName: DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};
}
@@ -26,6 +26,7 @@ import {
import { Language } from "./languages";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import { getTotalCacheSize } from "./trap-caching";
import {
checkActionVersion,
checkGitHubVersionInRange,
@@ -65,6 +66,12 @@ interface InitSuccessStatusReport extends StatusReportBase {
tools_resolved_version: string;
/** Comma-separated list of languages specified explicitly in the workflow file. */
workflow_languages: string;
/** Comma-separated list of languages for which we are using TRAP caching. */
trap_cache_languages: string;
/** Size of TRAP caches that we downloaded, in bytes. */
trap_cache_download_size_bytes: number;
/** Time taken to download TRAP caches, in milliseconds. */
trap_cache_download_duration_ms: number;
}

async function sendSuccessStatusReport(
@@ -115,6 +122,9 @@ async function sendSuccessStatusReport(
tools_input: getOptionalInput("tools") || "",
tools_resolved_version: toolsVersion,
workflow_languages: workflowLanguages || "",
trap_cache_languages: Object.keys(config.trapCaches).join(","),
trap_cache_download_size_bytes: await getTotalCacheSize(config.trapCaches),
trap_cache_download_duration_ms: config.trapCacheDownloadTime,
};

await sendStatusReport(statusReport);
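For orientation, the TRAP-caching slice of an init status report built from these fields would look roughly like the sketch below. Only the field names come from the diff; the values are invented examples.

```ts
// Illustrative only: sample values for the new init-action telemetry fields.
const trapCacheTelemetry = {
  trap_cache_languages: "javascript,ruby",
  trap_cache_download_size_bytes: 52428800, // await getTotalCacheSize(config.trapCaches)
  trap_cache_download_duration_ms: 3200, // config.trapCacheDownloadTime
};
```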
@@ -33,6 +33,7 @@ function getTestConfig(tmpDir: string): configUtils.Config {
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: configUtils.defaultAugmentationProperties,
trapCaches: {},
trapCacheDownloadTime: 0,
};
}
@@ -87,6 +87,7 @@ const testConfigWithoutTmpDir: Config = {
trapCaches: {
javascript: "/some/cache/dir",
},
trapCacheDownloadTime: 0,
};

function getTestConfigWithTempDir(tmpDir: string): configUtils.Config {
@@ -113,6 +114,7 @@ function getTestConfigWithTempDir(tmpDir: string): configUtils.Config {
javascript: path.resolve(tmpDir, "jsCache"),
ruby: path.resolve(tmpDir, "rubyCache"),
},
trapCacheDownloadTime: 0,
};
}
@@ -2,6 +2,7 @@ import * as fs from "fs";
import * as path from "path";

import * as cache from "@actions/cache";
import getFolderSize from "get-folder-size";

import * as actionsUtil from "./actions-util";
import { CodeQL, CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES } from "./codeql";
@@ -118,12 +119,19 @@ export async function downloadTrapCaches(
return result;
}

/**
* Possibly upload TRAP caches to the Actions cache.
* @param codeql The CodeQL instance to use.
* @param config The configuration for this workflow.
* @param logger A logger to record some informational messages to.
* @returns Whether the TRAP caches were uploaded.
*/
export async function uploadTrapCaches(
codeql: CodeQL,
config: Config,
logger: Logger
): Promise<void> {
if (!(await actionsUtil.isAnalyzingDefaultBranch())) return; // Only upload caches from the default branch
): Promise<boolean> {
if (!(await actionsUtil.isAnalyzingDefaultBranch())) return false; // Only upload caches from the default branch

const toAwait: Array<Promise<number>> = [];
for (const language of config.languages) {
@@ -138,6 +146,7 @@ export async function uploadTrapCaches(
toAwait.push(cache.saveCache([cacheDir], key));
}
await Promise.all(toAwait);
return true;
}

export async function getLanguagesSupportingCaching(
@@ -175,6 +184,23 @@ export async function getLanguagesSupportingCaching(
return result;
}

export async function getTotalCacheSize(
trapCaches: Partial<Record<Language, string>>
): Promise<number> {
const sizes = await Promise.all(
Object.values(trapCaches).map(async (cacheDir) => {
return new Promise<number>((resolve) => {
getFolderSize(cacheDir, (err, size) => {
// Ignore file system errors when getting the size. It's only used for telemetry anyway.
if (err) resolve(0);
resolve(size);
});
});
})
);
return sizes.reduce((a, b) => a + b, 0);
}

async function cacheKey(
codeql: CodeQL,
language: Language,
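The new `getTotalCacheSize` helper sums the on-disk size of every per-language cache directory and deliberately swallows file-system errors, since the number is only used for telemetry. A small usage sketch, with cache directory paths made up for illustration:

```ts
// Sketch: summing TRAP cache sizes for a status report.
// The directory paths below are illustrative only.
const trapCaches = {
  javascript: "/tmp/trap-caches/javascript",
  ruby: "/tmp/trap-caches/ruby",
};

// An unreadable or missing directory simply contributes 0 to the total,
// matching the telemetry-only intent of the helper above.
const totalBytes = await getTotalCacheSize(trapCaches);
console.log(`TRAP caches occupy ${totalBytes} bytes`);
```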
@@ -357,6 +357,7 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};

t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);