Upgrade Ava to v4

Henry Mercer 2022-02-01 18:01:11 +00:00
parent 9a40cc5274
commit ce89f1b611
1153 changed files with 27264 additions and 95308 deletions

@ -10,7 +10,8 @@
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:github/recommended",
"plugin:github/typescript"
"plugin:github/typescript",
"plugin:import/typescript"
],
"rules": {
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],

2
.gitignore vendored

@ -1,2 +1,4 @@
/runner/dist/
/runner/node_modules/
# Ignore for example failing-tests.json from AVA
node_modules/.cache

113
lib/config-utils.test.js generated

@ -117,7 +117,10 @@ function mockListLanguages(languages) {
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir, logger);
t.deepEqual(config1, config2);
t.not(config2, undefined);
if (config2 !== undefined) {
t.deepEqual(config1, config2);
}
});
});
(0, ava_1.default)("load input outside of workspace", async (t) => {
@ -760,28 +763,26 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
/**
* Test macro for ensuring the packs block is valid
*/
function parsePacksMacro(t, packsByLanguage, languages, expected) {
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected);
}
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`;
const parsePacksMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
});
/**
* Test macro for testing when the packs block is invalid
*/
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) {
t.throws(() => {
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
}, {
const parsePacksErrorMacro = ava_1.default.macro({
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
message: expected,
});
}
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`;
}),
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
});
/**
* Test macro for testing when the packs block is invalid
*/
function invalidPackNameMacro(t, name) {
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
}
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
const invalidPackNameMacro = ava_1.default.macro({
exec: (t, name) => parsePacksErrorMacro.exec(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)),
title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
});
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
[languages_1.Language.cpp]: [
@ -870,46 +871,48 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
async function mlPoweredQueriesMacro(t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (shouldRunMlPoweredQueries) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
},
],
const mlPoweredQueriesMacro = ava_1.default.macro({
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
},
async resolveQueries() {
return {
byLanguage: {
javascript: { "fake-query.ql": {} },
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
}
else {
t.deepEqual(packs, {});
}
});
}
mlPoweredQueriesMacro.title = (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
const queriesInputDescription = queriesInput
? `'queries: ${queriesInput}'`
: "default config";
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
};
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
: []), (0, logging_1.getRunnerLogger)(true));
if (shouldRunMlPoweredQueries) {
t.deepEqual(packs, {
[languages_1.Language.javascript]: [
{
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
},
],
});
}
else {
t.deepEqual(packs, {});
}
});
},
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
const queriesInputDescription = queriesInput
? `'queries: ${queriesInput}'`
: "default config";
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
},
});
// macro, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);

File diff suppressed because one or more lines are too long

@ -1 +1 @@
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI;YACxB,0BAA0B;YAC1B,4BAA4B;SAC7B,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,EACrE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;CAC7B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAG,EAAE,CAAC;YAChC,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAG;gBACzB,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;aA
CF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI;YACxB,0BAA0B;YAC1B,4BAA4B;SAC7B,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,EACrE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;CAC7B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;
aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}

@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}

@ -162,7 +162,10 @@ function getTestConfig(tmpDir) {
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
}, config));
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
// If e is undefined, then the previous assertion will fail.
if (e !== undefined) {
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
}
});
});
(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

2
node_modules/.bin/ava generated vendored

@ -1 +1 @@
../ava/cli.js
../ava/entrypoints/cli.mjs

@ -1 +0,0 @@
../import-local/fixtures/cli.js

1
node_modules/.bin/is-ci generated vendored

@ -1 +0,0 @@
../is-ci/bin.js

1
node_modules/.bin/rc generated vendored

@ -1 +0,0 @@
../rc/cli.js

2012
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

@ -1,14 +0,0 @@
ISC License (ISC)
Copyright (c) 2017, Mark Wubben <mark@novemberborn.net> (novemberborn.net)
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

@ -1,18 +0,0 @@
# @concordance/react
React plugin for [Concordance](https://github.com/concordancejs/concordance).
Allows
[`React.createElement()`](https://facebook.github.io/react/docs/react-api.html#createelement)
objects to be compared, formatted, diffed and serialized. Also supports
`toJSON()` renderings of
[`react-test-renderer`](https://www.npmjs.com/package/react-test-renderer).
These may be compared to `React.createElement()` objects.
When comparing [React
component](https://facebook.github.io/react/docs/components-and-props.html)
elements, the element type is compared by identity. After deserialization the
element types are compared by function name.
Component elements are formatted with a &#x235F; character after the element
name. Properties and children are formatted by [Concordance](https://github.com/concordancejs/concordance).

@ -1,75 +0,0 @@
'use strict'
const pkg = require('./package.json')
const elementFactory = require('./lib/elementFactory')
const testJsonFactory = require('./lib/testJsonFactory')
// Must be unique across all registered plugins.
exports.name = pkg.name
// Expected API version to be passed to register().
exports.apiVersion = 1
// Expected minimal version of Concordance. Concordance will increment its API
// version for breaking changes, this is useful if you rely on features or
// patches that were introduced in a specific version of Concordance.
exports.minimalConcordanceVersion = '1.0.0'
// Plugin-specific version of its serialization output.
exports.serializerVersion = 2
exports.theme = {
react: {
functionType: '\u235F',
openTag: {
start: '<',
end: '>',
selfClose: '/',
selfCloseVoid: ' /'
},
closeTag: {
open: '</',
close: '>'
},
tagName: {open: '', close: ''},
attribute: {
separator: '=',
value: {
openBracket: '{',
closeBracket: '}',
string: {
line: {open: '"', close: '"', escapeQuote: '"'}
}
}
},
child: {
openBracket: '{',
closeBracket: '}',
string: {
line: {open: '', close: '', escapeQuote: ''},
multiline: {start: '', end: '', escapeQuote: ''}
}
}
}
}
const ELEMENT = Symbol.for('react.element')
const TEST_JSON = Symbol.for('react.test.json')
function register (api) {
const reactTags = new Set()
const element = elementFactory(api, reactTags)
const testJson = testJsonFactory(api, element)
api.addDescriptor(0x01, element.tag, element.deserialize)
api.addDescriptor(0x02, testJson.tag, testJson.deserialize)
reactTags.add(element.tag).add(testJson.tag)
return value => {
if (value.$$typeof === ELEMENT) return element.describe
if (value.$$typeof === TEST_JSON) return testJson.describe
return null
}
}
exports.register = register

@ -1,239 +0,0 @@
'use strict'
function diffShallow (api, actual, expected, theme, indent) {
const childBuffer = api.lineBuilder.buffer()
const propertyBuffer = api.lineBuilder.buffer()
return {
append (formatted, origin) {
if (origin.isItem === true) {
childBuffer.append(formatted)
} else {
propertyBuffer.append(formatted)
}
},
finalize: () => {
const namesAreEqual = actual.compareNames(expected)
const actualName = actual.formatName(theme)
const expectedName = expected.formatName(theme)
const openTag = theme.react.openTag
const innerIndentation = indent.increase()
const allChildren = childBuffer.withFirstPrefixed(innerIndentation)
const children = allChildren.decompose()
const allProperties = propertyBuffer.withFirstPrefixed(innerIndentation)
const properties = allProperties.decompose()
// If the first properties are also the last, and either side has no
// children, ensure the properties are treated as being last. This
// leads to a better balanced diff.
if (properties.remaining.isEmpty && (!actual.hasChildren || !expected.hasChildren)) {
properties.last = properties.first
properties.first = {actual: api.lineBuilder.buffer(), expected: api.lineBuilder.buffer()}
}
const result = api.lineBuilder.buffer()
// Create a custom diff that is as neat as possible. It's likely
// there's a generic algorithm that can be used, but for expediency's
// sake handles all possible diffs by brute force instead.
if (actual.hasProperties && expected.hasProperties) {
if (namesAreEqual) {
result
.append(api.lineBuilder.first(openTag.start + actualName))
.append(properties.first.actual.stripFlags())
.append(properties.first.expected.stripFlags())
} else {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName))
.append(properties.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(properties.first.expected.stripFlags())
}
result.append(properties.remaining.stripFlags())
if (actual.hasChildren && expected.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.line(indent + openTag.end))
if (namesAreEqual) {
result
.append(allChildren.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.first.actual.stripFlags())
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(api.lineBuilder.actual.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
} else if (expected.hasChildren) {
result
.append(properties.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(properties.last.actual.stripFlags())
.append(properties.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
}
} else if (actual.hasProperties) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName))
.append(allProperties.stripFlags())
if (actual.hasChildren && expected.hasChildren) {
result
.append(api.lineBuilder.actual.line(indent + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
if (namesAreEqual) {
result
.append(children.last.actual.stripFlags())
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
}
} else if (expected.hasProperties) {
if (actual.hasChildren && expected.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
if (namesAreEqual) {
result
.append(children.last.actual.stripFlags())
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.line(indent + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
} else {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
.append(allProperties.stripFlags())
.append(api.lineBuilder.expected.last(indent + openTag.selfCloseVoid + openTag.end))
}
} else {
if (actual.hasChildren && expected.hasChildren) {
if (namesAreEqual) {
result
.append(api.lineBuilder.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(children.first.actual.stripFlags())
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(children.first.expected.stripFlags())
.append(children.remaining.stripFlags())
.append(children.last.actual.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(children.last.expected.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
}
} else if (actual.hasChildren) {
result
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
} else if (expected.hasChildren) {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
.append(allChildren.stripFlags())
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
} else {
if (namesAreEqual) {
result.append(api.lineBuilder.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
} else {
result
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
}
}
}
return result
},
shouldFormat (subject) {
return subject.isItem === true || subject.isProperty === true
},
increaseIndent: true
}
}
module.exports = diffShallow

@ -1,353 +0,0 @@
'use strict'
const arrify = require('arrify')
const diffShallow = require('./diffShallow')
const escapeText = require('./escapeText')
const FRAGMENT_NAME = Symbol.for('react.fragment')
function factory (api, reactTags) {
const tag = Symbol('@concordance/react.ElementValue')
function customPropertyFormatter (theme, indent, key, value) {
const separator = theme.react.attribute.separator + theme.react.attribute.value.openBracket
if (value.isSingle) {
return value
.withFirstPrefixed(key.formatAsKey(theme) + separator)
.withLastPostfixed(theme.react.attribute.value.closeBracket)
}
return api.lineBuilder.first(key.formatAsKey(theme) + separator)
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
.append(api.lineBuilder.last(indent + theme.react.attribute.value.closeBracket))
}
function themeProperty (theme) {
theme.property.increaseValueIndent = true
theme.property.customFormat = customPropertyFormatter
}
function themeStringProperty (theme) {
theme.property.separator = theme.react.attribute.separator
theme.property.after = ''
Object.assign(theme.string.line, theme.react.attribute.value.string.line)
}
function customItemFormatter (theme, indent, value) {
if (value.isSingle) {
return value
.withFirstPrefixed(theme.react.child.openBracket)
.withLastPostfixed(theme.react.child.closeBracket)
}
return api.lineBuilder.first(theme.react.child.openBracket)
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
.append(api.lineBuilder.last(indent + theme.react.child.closeBracket))
}
function themeChild (theme) {
theme.item.increaseValueIndent = true
theme.item.customFormat = customItemFormatter
}
function themeReactChild (theme) {
theme.item.after = ''
}
function themeStringChild (theme) {
theme.item.after = ''
Object.assign(theme.string, theme.react.child.string)
}
function describe (props) {
const element = props.value
const type = element.type
const hasTypeFn = typeof type === 'function'
const typeFn = hasTypeFn ? type : null
const name = hasTypeFn ? type.displayName || type.name : type
const children = arrify(element.props.children)
const properties = Object.assign({}, element.props)
delete properties.children
if (element.key !== null) {
properties.key = element.key
}
const hasProperties = Object.keys(properties).length > 0
return new DescribedElementValue(Object.assign({
children,
hasProperties,
hasTypeFn,
name,
properties,
typeFn,
isList: children.length > 0
}, props))
}
function deserialize (state, recursor) {
return new DeserializedElementValue(state, recursor)
}
class ElementValue extends api.ObjectValue {
constructor (props) {
super(props)
this.isFragment = props.name === FRAGMENT_NAME
this.name = props.name
this.hasProperties = props.hasProperties
this.hasTypeFn = props.hasTypeFn
this.hasChildren = this.isList
}
compare (expected) {
return this.tag === expected.tag && this.name === expected.name
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
formatName (theme) {
const formatted = api.wrapFromTheme(theme.react.tagName, this.isFragment ? 'React.Fragment' : this.name)
return this.hasTypeFn
? formatted + theme.react.functionType
: formatted
}
compareNames (expected) {
return this.name === expected.name && this.hasTypeFn === expected.hasTypeFn
}
formatShallow (theme, indent) {
const childBuffer = api.lineBuilder.buffer()
const propertyBuffer = api.lineBuilder.buffer()
return {
append (formatted, origin) {
if (origin.isItem === true) {
childBuffer.append(formatted)
} else {
propertyBuffer.append(formatted)
}
},
finalize: () => {
const name = this.formatName(theme)
const openTag = theme.react.openTag
if (!this.hasChildren && !this.hasProperties) {
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
}
const innerIndentation = indent.increase()
const children = childBuffer.withFirstPrefixed(innerIndentation).stripFlags()
const properties = propertyBuffer.withFirstPrefixed(innerIndentation).stripFlags()
const result = api.lineBuilder.buffer()
if (this.hasProperties) {
result
.append(api.lineBuilder.first(openTag.start + name))
.append(properties)
if (this.hasChildren) {
result.append(api.lineBuilder.line(indent + openTag.end))
} else {
result.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
}
} else {
result.append(api.lineBuilder.first(openTag.start + name + openTag.end))
}
if (this.hasChildren) {
result
.append(children)
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, name)))
}
return result
},
maxDepth: () => {
const name = this.formatName(theme)
const openTag = theme.react.openTag
if (!this.hasChildren && !this.hasProperties) {
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
}
let str = openTag.start + name
if (this.hasProperties) {
str += theme.maxDepth
if (this.hasChildren) {
str += openTag.end
} else {
str += ' ' + openTag.selfClose + openTag.end
}
} else {
str += openTag.end
}
if (this.hasChildren) {
str += theme.maxDepth + api.wrapFromTheme(theme.react.closeTag, name)
}
return api.lineBuilder.single(str)
},
shouldFormat (subject) {
return subject.isItem === true || subject.isProperty === true
},
increaseIndent: true
}
}
prepareDiff (expected) {
return {
compareResult: this.tag === expected.tag
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
}
diffShallow (expected, theme, indent) {
return diffShallow(api, this, expected, theme, indent)
}
serialize () {
return [this.isFragment, this.isFragment ? null : this.name, this.hasProperties, this.hasTypeFn, super.serialize()]
}
}
Object.defineProperty(ElementValue.prototype, 'tag', {value: tag})
function modifyThemes (recursor) {
return api.mapRecursor(recursor, next => {
let modifier
if (next.isItem === true) {
if (next.tag === api.descriptorTags.primitiveItem && next.value.tag === api.descriptorTags.string) {
modifier = themeStringChild
} else if (next.tag === api.descriptorTags.complexItem && reactTags.has(next.value.tag)) {
modifier = themeReactChild
} else {
modifier = themeChild
}
} else if (next.isProperty === true) {
if (
next.tag === api.descriptorTags.primitiveProperty &&
next.value.tag === api.descriptorTags.string &&
!next.value.includesLinebreaks
) {
modifier = themeStringProperty
} else {
modifier = themeProperty
}
}
return modifier
? api.modifyTheme(next, modifier)
: next
})
}
function DescribedMixin (base) {
return class extends api.DescribedMixin(base) {
constructor (props) {
super(props)
this.children = props.children
this.properties = props.properties
this.typeFn = props.typeFn
}
compare (expected) {
const result = super.compare(expected)
return result === api.SHALLOW_EQUAL && this.typeFn !== expected.typeFn
? api.UNEQUAL
: result
}
compareNames (expected) {
return super.compareNames(expected) && this.typeFn === expected.typeFn
}
createPropertyRecursor () {
// Symbols are not valid property keys for React elements. This code
// also assumes that the keys can be formatted as JSX-like attribute
// names. Keys are not pre-escaped before being passed to Concordance's
// property descriptor.
const keys = Object.keys(this.properties).sort()
const size = keys.length
let index = 0
const next = () => {
if (index === size) return null
const key = keys[index++]
// Note that string values are not specifically escaped such that the
// output is valid JSX.
return this.describeProperty(key, this.describeAny(this.properties[key]))
}
return {size, next}
}
createListRecursor () {
if (!this.isList) return super.createListRecursor()
const size = this.children.length
let index = 0
const next = () => {
if (index === size) return null
const current = index++
const child = this.children[current]
const type = typeof child
let descriptor
if (type === 'string') {
descriptor = this.describeAny(escapeText(child))
} else {
descriptor = this.describeAny(child)
}
return this.describeItem(current, descriptor)
}
return {size, next}
}
createRecursor () {
return modifyThemes(super.createRecursor())
}
}
}
function DeserializedMixin (base) {
return class extends api.DeserializedMixin(base) {
constructor (state, recursor) {
super(state[4], recursor)
this.isFragment = state[0]
this.name = this.isFragment ? FRAGMENT_NAME : state[1]
this.hasProperties = state[2]
this.hasTypeFn = state[3]
}
createRecursor () {
return modifyThemes(super.createRecursor())
}
}
}
const DescribedElementValue = DescribedMixin(ElementValue)
const DeserializedElementValue = DeserializedMixin(ElementValue)
return {
DescribedMixin,
DeserializedMixin,
ElementValue,
describe,
deserialize,
tag
}
}
module.exports = factory

@ -1,10 +0,0 @@
'use strict'
function escapeText (text) {
return text
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
// TODO: Escape characters that Concordance would otherwise replace with \u
// sequences.
}
module.exports = escapeText

@ -1,59 +0,0 @@
'use strict'
const arrify = require('arrify')
function factory (api, element) {
const tag = Symbol('@concordance/react.TestJsonValue')
function describe (props) {
const obj = props.value
const name = obj.type
const children = arrify(obj.children)
const properties = Object.assign({}, obj.props)
const hasProperties = Object.keys(properties).length > 0
return new DescribedTestJsonValue(Object.assign({
children,
hasProperties,
hasTypeFn: false,
name,
properties,
typeFn: null,
isList: children.length > 0
}, props))
}
function deserialize (state, recursor) {
return new DeserializedTestJsonValue(state, recursor)
}
class TestJsonValue extends element.ElementValue {
compare (expected) {
// Allow expected value to be a React element.
return (this.tag === expected.tag || expected.tag === element.tag) && this.name === expected.name
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
prepareDiff (expected) {
return {
// Allow expected value to be a React element.
compareResult: this.tag === expected.tag || expected.tag === element.tag
? api.SHALLOW_EQUAL
: api.UNEQUAL
}
}
}
Object.defineProperty(TestJsonValue.prototype, 'tag', {value: tag})
const DescribedTestJsonValue = element.DescribedMixin(TestJsonValue)
const DeserializedTestJsonValue = element.DeserializedMixin(TestJsonValue)
return {
describe,
deserialize,
tag
}
}
module.exports = factory

@ -1,8 +0,0 @@
'use strict';
module.exports = function (val) {
if (val === null || val === undefined) {
return [];
}
return Array.isArray(val) ? val : [val];
};

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -1,36 +0,0 @@
# arrify [![Build Status](https://travis-ci.org/sindresorhus/arrify.svg?branch=master)](https://travis-ci.org/sindresorhus/arrify)
> Convert a value to an array
## Install
```
$ npm install --save arrify
```
## Usage
```js
const arrify = require('arrify');
arrify('unicorn');
//=> ['unicorn']
arrify(['unicorn']);
//=> ['unicorn']
arrify(null);
//=> []
arrify(undefined);
//=> []
```
*Supplying `null` or `undefined` results in an empty array.*
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)

@ -1,75 +0,0 @@
{
"name": "@concordance/react",
"version": "2.0.0",
"description": "Compare, format, diff and serialize React trees with Concordance",
"main": "index.js",
"files": [
"lib",
"index.js"
],
"engines": {
"node": ">=6.12.3 <7 || >=8.9.4 <9 || >=10.0.0"
},
"scripts": {
"build:fixtures": "babel --presets=module:@babel/preset-react,module:ava/stage-4 --out-dir=test/fixtures/react --extensions=.jsx test/fixtures/react",
"lint": "as-i-preach",
"pretest": "npm run -s build:fixtures",
"test": "npm run -s lint && nyc ava"
},
"repository": {
"type": "git",
"url": "git+https://github.com/concordancejs/react.git"
},
"author": "Mark Wubben (https://novemberborn.net/)",
"license": "ISC",
"bugs": {
"url": "https://github.com/concordancejs/react/issues"
},
"homepage": "https://github.com/concordancejs/react#readme",
"keywords": [
"concordance-plugin",
"concordance",
"react"
],
"dependencies": {
"arrify": "^1.0.1"
},
"devDependencies": {
"@babel/cli": "^7.1.0",
"@babel/core": "^7.1.0",
"@babel/preset-react": "^7.0.0",
"@novemberborn/as-i-preach": "^10.1.0",
"ava": "1.0.0-beta.8",
"codecov": "^3.1.0",
"concordance": "^4.0.0",
"nyc": "^13.0.1",
"react": "^16.5.2",
"react-test-renderer": "^16.5.2"
},
"as-i-preach": {
"allowDevDependencies": [
"test/**/*.js",
"test/**/*.jsx"
],
"ignore": [
"test/fixtures/react/*.js"
]
},
"ava": {
"babel": {
"testOptions": {
"presets": [
"module:@babel/preset-react"
]
}
}
},
"nyc": {
"reporter": [
"html",
"lcov",
"text"
]
},
"standard-engine": "@novemberborn/as-i-preach"
}

@ -1,132 +0,0 @@
/// <reference types="node" />
/// <reference lib="es2016" />
/// <reference lib="es2017.sharedmemory" />
/// <reference lib="esnext.asynciterable" />
/// <reference lib="dom" />
declare type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array;
declare type Primitive = null | undefined | string | number | boolean | Symbol;
export interface ArrayLike {
length: number;
}
export interface Class<T = unknown> {
new (...args: any[]): T;
}
declare type DomElement = object & {
nodeType: 1;
nodeName: string;
};
declare type NodeStream = object & {
pipe: Function;
};
export declare const enum TypeName {
null = "null",
boolean = "boolean",
undefined = "undefined",
string = "string",
number = "number",
symbol = "symbol",
Function = "Function",
GeneratorFunction = "GeneratorFunction",
AsyncFunction = "AsyncFunction",
Observable = "Observable",
Array = "Array",
Buffer = "Buffer",
Object = "Object",
RegExp = "RegExp",
Date = "Date",
Error = "Error",
Map = "Map",
Set = "Set",
WeakMap = "WeakMap",
WeakSet = "WeakSet",
Int8Array = "Int8Array",
Uint8Array = "Uint8Array",
Uint8ClampedArray = "Uint8ClampedArray",
Int16Array = "Int16Array",
Uint16Array = "Uint16Array",
Int32Array = "Int32Array",
Uint32Array = "Uint32Array",
Float32Array = "Float32Array",
Float64Array = "Float64Array",
ArrayBuffer = "ArrayBuffer",
SharedArrayBuffer = "SharedArrayBuffer",
DataView = "DataView",
Promise = "Promise",
URL = "URL"
}
declare function is(value: unknown): TypeName;
declare namespace is {
const undefined: (value: unknown) => value is undefined;
const string: (value: unknown) => value is string;
const number: (value: unknown) => value is number;
const function_: (value: unknown) => value is Function;
const null_: (value: unknown) => value is null;
const class_: (value: unknown) => value is Class<unknown>;
const boolean: (value: unknown) => value is boolean;
const symbol: (value: unknown) => value is Symbol;
const numericString: (value: unknown) => boolean;
const array: (arg: any) => arg is any[];
const buffer: (input: unknown) => input is Buffer;
const nullOrUndefined: (value: unknown) => value is null | undefined;
const object: (value: unknown) => value is object;
const iterable: (value: unknown) => value is IterableIterator<unknown>;
const asyncIterable: (value: unknown) => value is AsyncIterableIterator<unknown>;
const generator: (value: unknown) => value is Generator;
const nativePromise: (value: unknown) => value is Promise<unknown>;
const promise: (value: unknown) => value is Promise<unknown>;
const generatorFunction: (value: unknown) => value is GeneratorFunction;
const asyncFunction: (value: unknown) => value is Function;
const boundFunction: (value: unknown) => value is Function;
const regExp: (value: unknown) => value is RegExp;
const date: (value: unknown) => value is Date;
const error: (value: unknown) => value is Error;
const map: (value: unknown) => value is Map<unknown, unknown>;
const set: (value: unknown) => value is Set<unknown>;
const weakMap: (value: unknown) => value is WeakMap<object, unknown>;
const weakSet: (value: unknown) => value is WeakSet<object>;
const int8Array: (value: unknown) => value is Int8Array;
const uint8Array: (value: unknown) => value is Uint8Array;
const uint8ClampedArray: (value: unknown) => value is Uint8ClampedArray;
const int16Array: (value: unknown) => value is Int16Array;
const uint16Array: (value: unknown) => value is Uint16Array;
const int32Array: (value: unknown) => value is Int32Array;
const uint32Array: (value: unknown) => value is Uint32Array;
const float32Array: (value: unknown) => value is Float32Array;
const float64Array: (value: unknown) => value is Float64Array;
const arrayBuffer: (value: unknown) => value is ArrayBuffer;
const sharedArrayBuffer: (value: unknown) => value is SharedArrayBuffer;
const dataView: (value: unknown) => value is DataView;
const directInstanceOf: <T>(instance: unknown, klass: Class<T>) => instance is T;
const urlInstance: (value: unknown) => value is URL;
const urlString: (value: unknown) => boolean;
const truthy: (value: unknown) => boolean;
const falsy: (value: unknown) => boolean;
const nan: (value: unknown) => boolean;
const primitive: (value: unknown) => value is Primitive;
const integer: (value: unknown) => value is number;
const safeInteger: (value: unknown) => value is number;
const plainObject: (value: unknown) => boolean;
const typedArray: (value: unknown) => value is TypedArray;
const arrayLike: (value: unknown) => value is ArrayLike;
const inRange: (value: number, range: number | number[]) => boolean;
const domElement: (value: unknown) => value is DomElement;
const observable: (value: unknown) => boolean;
const nodeStream: (value: unknown) => value is NodeStream;
const infinite: (value: unknown) => boolean;
const even: (value: number) => boolean;
const odd: (value: number) => boolean;
const emptyArray: (value: unknown) => boolean;
const nonEmptyArray: (value: unknown) => boolean;
const emptyString: (value: unknown) => boolean;
const nonEmptyString: (value: unknown) => boolean;
const emptyStringOrWhitespace: (value: unknown) => boolean;
const emptyObject: (value: unknown) => boolean;
const nonEmptyObject: (value: unknown) => boolean;
const emptySet: (value: unknown) => boolean;
const nonEmptySet: (value: unknown) => boolean;
const emptyMap: (value: unknown) => boolean;
const nonEmptyMap: (value: unknown) => boolean;
const any: (predicate: unknown, ...values: unknown[]) => boolean;
const all: (predicate: unknown, ...values: unknown[]) => boolean;
}
export default is;

@ -1,245 +0,0 @@
"use strict";
/// <reference lib="es2016"/>
/// <reference lib="es2017.sharedmemory"/>
/// <reference lib="esnext.asynciterable"/>
/// <reference lib="dom"/>
Object.defineProperty(exports, "__esModule", { value: true });
// TODO: Use the `URL` global when targeting Node.js 10
// tslint:disable-next-line
const URLGlobal = typeof URL === 'undefined' ? require('url').URL : URL;
const toString = Object.prototype.toString;
const isOfType = (type) => (value) => typeof value === type;
const isBuffer = (input) => !is.nullOrUndefined(input) && !is.nullOrUndefined(input.constructor) && is.function_(input.constructor.isBuffer) && input.constructor.isBuffer(input);
const getObjectType = (value) => {
const objectName = toString.call(value).slice(8, -1);
if (objectName) {
return objectName;
}
return null;
};
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
function is(value) {
switch (value) {
case null:
return "null" /* null */;
case true:
case false:
return "boolean" /* boolean */;
default:
}
switch (typeof value) {
case 'undefined':
return "undefined" /* undefined */;
case 'string':
return "string" /* string */;
case 'number':
return "number" /* number */;
case 'symbol':
return "symbol" /* symbol */;
default:
}
if (is.function_(value)) {
return "Function" /* Function */;
}
if (is.observable(value)) {
return "Observable" /* Observable */;
}
if (Array.isArray(value)) {
return "Array" /* Array */;
}
if (isBuffer(value)) {
return "Buffer" /* Buffer */;
}
const tagType = getObjectType(value);
if (tagType) {
return tagType;
}
if (value instanceof String || value instanceof Boolean || value instanceof Number) {
throw new TypeError('Please don\'t use object wrappers for primitive types');
}
return "Object" /* Object */;
}
(function (is) {
// tslint:disable-next-line:strict-type-predicates
const isObject = (value) => typeof value === 'object';
// tslint:disable:variable-name
is.undefined = isOfType('undefined');
is.string = isOfType('string');
is.number = isOfType('number');
is.function_ = isOfType('function');
// tslint:disable-next-line:strict-type-predicates
is.null_ = (value) => value === null;
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
is.boolean = (value) => value === true || value === false;
is.symbol = isOfType('symbol');
// tslint:enable:variable-name
is.numericString = (value) => is.string(value) && value.length > 0 && !Number.isNaN(Number(value));
is.array = Array.isArray;
is.buffer = isBuffer;
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value));
is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]);
is.asyncIterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.asyncIterator]);
is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
is.nativePromise = (value) => isObjectOfType("Promise" /* Promise */)(value);
const hasPromiseAPI = (value) => !is.null_(value) &&
isObject(value) &&
is.function_(value.then) &&
is.function_(value.catch);
is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
is.generatorFunction = isObjectOfType("GeneratorFunction" /* GeneratorFunction */);
is.asyncFunction = isObjectOfType("AsyncFunction" /* AsyncFunction */);
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
is.regExp = isObjectOfType("RegExp" /* RegExp */);
is.date = isObjectOfType("Date" /* Date */);
is.error = isObjectOfType("Error" /* Error */);
is.map = (value) => isObjectOfType("Map" /* Map */)(value);
is.set = (value) => isObjectOfType("Set" /* Set */)(value);
is.weakMap = (value) => isObjectOfType("WeakMap" /* WeakMap */)(value);
is.weakSet = (value) => isObjectOfType("WeakSet" /* WeakSet */)(value);
is.int8Array = isObjectOfType("Int8Array" /* Int8Array */);
is.uint8Array = isObjectOfType("Uint8Array" /* Uint8Array */);
is.uint8ClampedArray = isObjectOfType("Uint8ClampedArray" /* Uint8ClampedArray */);
is.int16Array = isObjectOfType("Int16Array" /* Int16Array */);
is.uint16Array = isObjectOfType("Uint16Array" /* Uint16Array */);
is.int32Array = isObjectOfType("Int32Array" /* Int32Array */);
is.uint32Array = isObjectOfType("Uint32Array" /* Uint32Array */);
is.float32Array = isObjectOfType("Float32Array" /* Float32Array */);
is.float64Array = isObjectOfType("Float64Array" /* Float64Array */);
is.arrayBuffer = isObjectOfType("ArrayBuffer" /* ArrayBuffer */);
is.sharedArrayBuffer = isObjectOfType("SharedArrayBuffer" /* SharedArrayBuffer */);
is.dataView = isObjectOfType("DataView" /* DataView */);
is.directInstanceOf = (instance, klass) => Object.getPrototypeOf(instance) === klass.prototype;
is.urlInstance = (value) => isObjectOfType("URL" /* URL */)(value);
is.urlString = (value) => {
if (!is.string(value)) {
return false;
}
try {
new URLGlobal(value); // tslint:disable-line no-unused-expression
return true;
}
catch (_a) {
return false;
}
};
is.truthy = (value) => Boolean(value);
is.falsy = (value) => !value;
is.nan = (value) => Number.isNaN(value);
const primitiveTypes = new Set([
'undefined',
'string',
'number',
'boolean',
'symbol'
]);
is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value);
is.integer = (value) => Number.isInteger(value);
is.safeInteger = (value) => Number.isSafeInteger(value);
is.plainObject = (value) => {
// From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js
let prototype;
return getObjectType(value) === "Object" /* Object */ &&
(prototype = Object.getPrototypeOf(value), prototype === null || // tslint:disable-line:ban-comma-operator
prototype === Object.getPrototypeOf({}));
};
const typedArrayTypes = new Set([
"Int8Array" /* Int8Array */,
"Uint8Array" /* Uint8Array */,
"Uint8ClampedArray" /* Uint8ClampedArray */,
"Int16Array" /* Int16Array */,
"Uint16Array" /* Uint16Array */,
"Int32Array" /* Int32Array */,
"Uint32Array" /* Uint32Array */,
"Float32Array" /* Float32Array */,
"Float64Array" /* Float64Array */
]);
is.typedArray = (value) => {
const objectType = getObjectType(value);
if (objectType === null) {
return false;
}
return typedArrayTypes.has(objectType);
};
const isValidLength = (value) => is.safeInteger(value) && value > -1;
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
is.inRange = (value, range) => {
if (is.number(range)) {
return value >= Math.min(0, range) && value <= Math.max(range, 0);
}
if (is.array(range) && range.length === 2) {
return value >= Math.min(...range) && value <= Math.max(...range);
}
throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
};
const NODE_TYPE_ELEMENT = 1;
const DOM_PROPERTIES_TO_CHECK = [
'innerHTML',
'ownerDocument',
'style',
'attributes',
'nodeValue'
];
is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) &&
!is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value);
is.observable = (value) => {
if (!value) {
return false;
}
if (value[Symbol.observable] && value === value[Symbol.observable]()) {
return true;
}
if (value['@@observable'] && value === value['@@observable']()) {
return true;
}
return false;
};
is.nodeStream = (value) => !is.nullOrUndefined(value) && isObject(value) && is.function_(value.pipe) && !is.observable(value);
is.infinite = (value) => value === Infinity || value === -Infinity;
const isAbsoluteMod2 = (rem) => (value) => is.integer(value) && Math.abs(value % 2) === rem;
is.even = isAbsoluteMod2(0);
is.odd = isAbsoluteMod2(1);
const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false;
is.emptyArray = (value) => is.array(value) && value.length === 0;
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
is.emptyString = (value) => is.string(value) && value.length === 0;
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
is.emptySet = (value) => is.set(value) && value.size === 0;
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
is.emptyMap = (value) => is.map(value) && value.size === 0;
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
const predicateOnArray = (method, predicate, values) => {
if (is.function_(predicate) === false) {
throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
}
if (values.length === 0) {
throw new TypeError('Invalid number of values');
}
return method.call(values, predicate);
};
// tslint:disable variable-name
is.any = (predicate, ...values) => predicateOnArray(Array.prototype.some, predicate, values);
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
// tslint:enable variable-name
})(is || (is = {}));
// Some few keywords are reserved, but we'll populate them for Node.js users
// See https://github.com/Microsoft/TypeScript/issues/2536
Object.defineProperties(is, {
class: {
value: is.class_
},
function: {
value: is.function_
},
null: {
value: is.null_
}
});
exports.default = is;
// For CommonJS default export support
module.exports = is;
module.exports.default = is;
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View file

@@ -1,63 +0,0 @@
{
"name": "@sindresorhus/is",
"version": "0.14.0",
"description": "Type check values: `is.string('🦄') //=> true`",
"license": "MIT",
"repository": "sindresorhus/is",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"main": "dist/index.js",
"engines": {
"node": ">=6"
},
"scripts": {
"lint": "tslint --format stylish --project .",
"build": "del dist && tsc",
"test": "npm run lint && npm run build && ava dist/tests",
"prepublish": "npm run build && del dist/tests"
},
"files": [
"dist"
],
"keywords": [
"type",
"types",
"is",
"check",
"checking",
"validate",
"validation",
"utility",
"util",
"typeof",
"instanceof",
"object",
"assert",
"assertion",
"test",
"kind",
"primitive",
"verify",
"compare"
],
"devDependencies": {
"@sindresorhus/tsconfig": "^0.1.0",
"@types/jsdom": "^11.12.0",
"@types/node": "^10.12.10",
"@types/tempy": "^0.2.0",
"@types/zen-observable": "^0.8.0",
"ava": "^0.25.0",
"del-cli": "^1.1.0",
"jsdom": "^11.6.2",
"rxjs": "^6.3.3",
"tempy": "^0.2.1",
"tslint": "^5.9.1",
"tslint-xo": "^0.10.0",
"typescript": "^3.2.1",
"zen-observable": "^0.8.8"
},
"types": "dist/index.d.ts"
}

View file

@@ -1,451 +0,0 @@
# is [![Build Status](https://travis-ci.org/sindresorhus/is.svg?branch=master)](https://travis-ci.org/sindresorhus/is)
> Type check values: `is.string('🦄') //=> true`
<img src="header.gif" width="182" align="right">
## Install
```
$ npm install @sindresorhus/is
```
## Usage
```js
const is = require('@sindresorhus/is');
is('🦄');
//=> 'string'
is(new Map());
//=> 'Map'
is.number(6);
//=> true
```
When using `is` together with TypeScript, [type guards](http://www.typescriptlang.org/docs/handbook/advanced-types.html#type-guards-and-differentiating-types) are used to infer the correct type inside if-else statements.
```ts
import is from '@sindresorhus/is';
const padLeft = (value: string, padding: string | number) => {
if (is.number(padding)) {
// `padding` is typed as `number`
return Array(padding + 1).join(' ') + value;
}
if (is.string(padding)) {
// `padding` is typed as `string`
return padding + value;
}
throw new TypeError(`Expected 'padding' to be of type 'string' or 'number', got '${is(padding)}'.`);
}
padLeft('🦄', 3);
//=> ' 🦄'
padLeft('🦄', '🌈');
//=> '🌈🦄'
```
## API
### is(value)
Returns the type of `value`.
Primitives are lowercase and object types are camelcase.
Example:
- `'undefined'`
- `'null'`
- `'string'`
- `'symbol'`
- `'Array'`
- `'Function'`
- `'Object'`
Note: It will throw an error if you try to feed it object-wrapped primitives, as that's a bad practice. For example `new String('foo')`.
### is.{method}
All the below methods accept a value and return a boolean indicating whether the value is of the desired type.
#### Primitives
##### .undefined(value)
##### .null(value)
##### .string(value)
##### .number(value)
##### .boolean(value)
##### .symbol(value)
#### Built-in types
##### .array(value)
##### .function(value)
##### .buffer(value)
##### .object(value)
Keep in mind that [functions are objects too](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions).
##### .numericString(value)
Returns `true` for a string that represents a number. For example, `'42'` and `'-8'`.
Note: `'NaN'` returns `false`, but `'Infinity'` and `'-Infinity'` return `true`.
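For example (illustrative values):
```js
is.numericString('42');
//=> true
is.numericString('1e3');
//=> true
is.numericString('unicorn');
//=> false
```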
##### .regExp(value)
##### .date(value)
##### .error(value)
##### .nativePromise(value)
##### .promise(value)
Returns `true` for any object with a `.then()` and `.catch()` method. Prefer this one over `.nativePromise()` as you usually want to allow userland promise implementations too.
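For example, any native promise passes, and so does an object exposing both `.then()` and `.catch()` (the plain object below is just an illustrative thenable):
```js
is.promise(Promise.resolve('🦄'));
//=> true
is.promise({then() {}, catch() {}});
//=> true
is.promise({then() {}});
//=> false
```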
##### .generator(value)
Returns `true` for any object that implements its own `.next()` and `.throw()` methods and has a function definition for `Symbol.iterator`.
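For example, a generator object passes while the generator function itself does not:
```js
function * fixture() {
	yield '🦄';
}
is.generator(fixture());
//=> true
is.generator(fixture);
//=> false
```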
##### .generatorFunction(value)
##### .asyncFunction(value)
Returns `true` for any `async` function that can be called with the `await` operator.
```js
is.asyncFunction(async () => {});
// => true
is.asyncFunction(() => {});
// => false
```
##### .boundFunction(value)
Returns `true` for any `bound` function.
```js
is.boundFunction(() => {});
// => true
is.boundFunction(function () {}.bind(null));
// => true
is.boundFunction(function () {});
// => false
```
##### .map(value)
##### .set(value)
##### .weakMap(value)
##### .weakSet(value)
#### Typed arrays
##### .int8Array(value)
##### .uint8Array(value)
##### .uint8ClampedArray(value)
##### .int16Array(value)
##### .uint16Array(value)
##### .int32Array(value)
##### .uint32Array(value)
##### .float32Array(value)
##### .float64Array(value)
#### Structured data
##### .arrayBuffer(value)
##### .sharedArrayBuffer(value)
##### .dataView(value)
#### Emptiness
##### .emptyString(value)
Returns `true` if the value is a `string` and the `.length` is 0.
##### .nonEmptyString(value)
Returns `true` if the value is a `string` and the `.length` is more than 0.
##### .emptyStringOrWhitespace(value)
Returns `true` if `is.emptyString(value)` or if it's a `string` that is all whitespace.
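For example (illustrative values):
```js
is.emptyStringOrWhitespace('');
//=> true
is.emptyStringOrWhitespace(' \t ');
//=> true
is.emptyStringOrWhitespace('🦄');
//=> false
```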
##### .emptyArray(value)
Returns `true` if the value is an `Array` and the `.length` is 0.
##### .nonEmptyArray(value)
Returns `true` if the value is an `Array` and the `.length` is more than 0.
##### .emptyObject(value)
Returns `true` if the value is an `Object` and `Object.keys(value).length` is 0.
Please note that `Object.keys` returns only own enumerable properties. Hence something like this can happen:
```js
const object1 = {};
Object.defineProperty(object1, 'property1', {
value: 42,
writable: true,
enumerable: false,
configurable: true
});
is.emptyObject(object1);
// => true
```
##### .nonEmptyObject(value)
Returns `true` if the value is an `Object` and `Object.keys(value).length` is more than 0.
##### .emptySet(value)
Returns `true` if the value is a `Set` and the `.size` is 0.
##### .nonEmptySet(value)
Returns `true` if the value is a `Set` and the `.size` is more than 0.
##### .emptyMap(value)
Returns `true` if the value is a `Map` and the `.size` is 0.
##### .nonEmptyMap(value)
Returns `true` if the value is a `Map` and the `.size` is more than 0.
#### Miscellaneous
##### .directInstanceOf(value, class)
Returns `true` if `value` is a direct instance of `class`.
```js
is.directInstanceOf(new Error(), Error);
//=> true
class UnicornError extends Error {}
is.directInstanceOf(new UnicornError(), Error);
//=> false
```
##### .urlInstance(value)
Returns `true` if `value` is an instance of the [`URL` class](https://developer.mozilla.org/en-US/docs/Web/API/URL).
```js
const url = new URL('https://example.com');
is.urlInstance(url);
//=> true
```
##### .urlString(value)
Returns `true` if `value` is a URL string.
Note: this only does basic checking using the [`URL` class](https://developer.mozilla.org/en-US/docs/Web/API/URL) constructor.
```js
const url = 'https://example.com';
is.urlString(url);
//=> true
is.urlString(new URL(url));
//=> false
```
##### .truthy(value)
Returns `true` for all values that evaluate to true in a boolean context:
```js
is.truthy('🦄');
//=> true
is.truthy(undefined);
//=> false
```
##### .falsy(value)
Returns `true` if `value` is one of: `false`, `0`, `''`, `null`, `undefined`, `NaN`.
##### .nan(value)
##### .nullOrUndefined(value)
##### .primitive(value)
JavaScript primitives are as follows: `null`, `undefined`, `string`, `number`, `boolean`, `symbol`.
##### .integer(value)
##### .safeInteger(value)
Returns `true` if `value` is a [safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger).
##### .plainObject(value)
An object is plain if it's created by either `{}`, `new Object()`, or `Object.create(null)`.
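Some illustrative checks:
```js
is.plainObject({unicorn: '🦄'});
//=> true
is.plainObject(Object.create(null));
//=> true
is.plainObject(new Map());
//=> false
```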
##### .iterable(value)
##### .asyncIterable(value)
##### .class(value)
Returns `true` for values declared with the `class` syntax (class constructors).
##### .typedArray(value)
##### .arrayLike(value)
A `value` is array-like if it is not a function and has a `value.length` that is a safe integer greater than or equal to 0.
```js
is.arrayLike(document.forms);
//=> true
function foo() {
is.arrayLike(arguments);
//=> true
}
foo();
```
##### .inRange(value, range)
Check if `value` (number) is in the given `range`. The range is an array of two values, lower bound and upper bound, in no specific order.
```js
is.inRange(3, [0, 5]);
is.inRange(3, [5, 0]);
is.inRange(0, [-2, 2]);
```
##### .inRange(value, upperBound)
Check if `value` (number) is in the range of `0` to `upperBound`.
```js
is.inRange(3, 10);
```
##### .domElement(value)
Returns `true` if `value` is a DOM Element.
##### .nodeStream(value)
Returns `true` if `value` is a Node.js [stream](https://nodejs.org/api/stream.html).
```js
const fs = require('fs');
is.nodeStream(fs.createReadStream('unicorn.png'));
//=> true
```
##### .observable(value)
Returns `true` if `value` is an `Observable`.
```js
const {Observable} = require('rxjs');
is.observable(new Observable());
//=> true
```
##### .infinite(value)
Check if `value` is `Infinity` or `-Infinity`.
##### .even(value)
Returns `true` if `value` is an even integer.
##### .odd(value)
Returns `true` if `value` is an odd integer.
##### .any(predicate, ...values)
Returns `true` if **any** of the input `values` returns true in the `predicate`:
```js
is.any(is.string, {}, true, '🦄');
//=> true
is.any(is.boolean, 'unicorns', [], new Map());
//=> false
```
##### .all(predicate, ...values)
Returns `true` if **all** of the input `values` return true in the `predicate`:
```js
is.all(is.object, {}, new Map(), new Set());
//=> true
is.all(is.string, '🦄', [], 'unicorns');
//=> false
```
## FAQ
### Why yet another type checking module?
There are hundreds of type checking modules on npm; unfortunately, I couldn't find any that fit my needs:
- Includes both type methods and ability to get the type
- Types of primitives returned as lowercase and object types as camelcase
- Covers all built-ins
- Unsurprising behavior
- Well-maintained
- Comprehensive test suite
Of the ones I found, each satisfied only about three of these.
The most common mistakes I noticed in these modules were using `instanceof` for type checking, forgetting that functions are objects, and omitting `symbol` as a primitive.
## Related
- [ow](https://github.com/sindresorhus/ow) - Function argument validation for humans
- [is-stream](https://github.com/sindresorhus/is-stream) - Check if something is a Node.js stream
- [is-observable](https://github.com/sindresorhus/is-observable) - Check if a value is an Observable
- [file-type](https://github.com/sindresorhus/file-type) - Detect the file type of a Buffer/Uint8Array
- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address
- [is-array-sorted](https://github.com/sindresorhus/is-array-sorted) - Check if an Array is sorted
- [is-error-constructor](https://github.com/sindresorhus/is-error-constructor) - Check if a value is an error constructor
- [is-empty-iterable](https://github.com/sindresorhus/is-empty-iterable) - Check if an Iterable is empty
- [is-blob](https://github.com/sindresorhus/is-blob) - Check if a value is a Blob - File-like object of immutable, raw data
- [has-emoji](https://github.com/sindresorhus/has-emoji) - Check whether a string has any emoji
## Created by
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Giora Guttsait](https://github.com/gioragutt)
- [Brandon Smith](https://github.com/brandon93s)
## License
MIT

View file

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2018 Szymon Marczak
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@@ -1,70 +0,0 @@
# http-timer
> Timings for HTTP requests
[![Build Status](https://travis-ci.org/szmarczak/http-timer.svg?branch=master)](https://travis-ci.org/szmarczak/http-timer)
[![Coverage Status](https://coveralls.io/repos/github/szmarczak/http-timer/badge.svg?branch=master)](https://coveralls.io/github/szmarczak/http-timer?branch=master)
[![install size](https://packagephobia.now.sh/badge?p=@szmarczak/http-timer)](https://packagephobia.now.sh/result?p=@szmarczak/http-timer)
Inspired by the [`request` package](https://github.com/request/request).
## Usage
```js
'use strict';
const https = require('https');
const timer = require('@szmarczak/http-timer');
const request = https.get('https://httpbin.org/anything');
const timings = timer(request);
request.on('response', response => {
response.on('data', () => {}); // Consume the data somehow
response.on('end', () => {
console.log(timings);
});
});
// { start: 1535708511443,
// socket: 1535708511444,
// lookup: 1535708511444,
// connect: 1535708511582,
// upload: 1535708511887,
// response: 1535708512037,
// end: 1535708512040,
// phases:
// { wait: 1,
// dns: 0,
// tcp: 138,
// request: 305,
// firstByte: 150,
// download: 3,
// total: 597 } }
```
## API
### timer(request)
Returns: `Object`
- `start` - Time when the request started.
- `socket` - Time when a socket was assigned to the request.
- `lookup` - Time when the DNS lookup finished.
- `connect` - Time when the socket successfully connected.
- `upload` - Time when the request finished uploading.
- `response` - Time when the request fired the `response` event.
- `end` - Time when the response fired the `end` event.
- `error` - Time when the request fired the `error` event.
- `phases`
- `wait` - `timings.socket - timings.start`
- `dns` - `timings.lookup - timings.socket`
- `tcp` - `timings.connect - timings.lookup`
- `request` - `timings.upload - timings.connect`
- `firstByte` - `timings.response - timings.upload`
- `download` - `timings.end - timings.response`
- `total` - `timings.end - timings.start` or `timings.error - timings.start`
**Note**: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.
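As a minimal sketch of the `error` and `phases.total` fields described above (the hostname is illustrative and intentionally unresolvable):
```js
'use strict';
const https = require('https');
const timer = require('@szmarczak/http-timer');
// The reserved `.invalid` TLD never resolves, so this request errors.
const request = https.get('https://does-not-exist.invalid');
const timings = timer(request);
request.on('error', () => {
	// `timings.error` is set before the event reaches listeners,
	// and `phases.total` becomes `timings.error - timings.start`.
	console.log(timings.error, timings.phases.total);
});
```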
## License
MIT

View file

@@ -1,47 +0,0 @@
{
"name": "@szmarczak/http-timer",
"version": "1.1.2",
"description": "Timings for HTTP requests",
"main": "source",
"engines": {
"node": ">=6"
},
"scripts": {
"test": "xo && nyc ava",
"coveralls": "nyc report --reporter=text-lcov | coveralls"
},
"files": [
"source"
],
"keywords": [
"http",
"https",
"timer",
"timings"
],
"repository": {
"type": "git",
"url": "git+https://github.com/szmarczak/http-timer.git"
},
"author": "Szymon Marczak",
"license": "MIT",
"bugs": {
"url": "https://github.com/szmarczak/http-timer/issues"
},
"homepage": "https://github.com/szmarczak/http-timer#readme",
"xo": {
"rules": {
"unicorn/filename-case": "camelCase"
}
},
"devDependencies": {
"ava": "^0.25.0",
"coveralls": "^3.0.2",
"p-event": "^2.1.0",
"nyc": "^12.0.2",
"xo": "^0.22.0"
},
"dependencies": {
"defer-to-connect": "^1.0.1"
}
}

View file

@@ -1,99 +0,0 @@
'use strict';
const deferToConnect = require('defer-to-connect');
module.exports = request => {
const timings = {
start: Date.now(),
socket: null,
lookup: null,
connect: null,
upload: null,
response: null,
end: null,
error: null,
phases: {
wait: null,
dns: null,
tcp: null,
request: null,
firstByte: null,
download: null,
total: null
}
};
const handleError = origin => {
const emit = origin.emit.bind(origin);
origin.emit = (event, ...args) => {
// Catches the `error` event
if (event === 'error') {
timings.error = Date.now();
timings.phases.total = timings.error - timings.start;
origin.emit = emit;
}
// Saves the original behavior
return emit(event, ...args);
};
};
let uploadFinished = false;
const onUpload = () => {
timings.upload = Date.now();
timings.phases.request = timings.upload - timings.connect;
};
handleError(request);
request.once('socket', socket => {
timings.socket = Date.now();
timings.phases.wait = timings.socket - timings.start;
const lookupListener = () => {
timings.lookup = Date.now();
timings.phases.dns = timings.lookup - timings.socket;
};
socket.once('lookup', lookupListener);
deferToConnect(socket, () => {
timings.connect = Date.now();
if (timings.lookup === null) {
socket.removeListener('lookup', lookupListener);
timings.lookup = timings.connect;
timings.phases.dns = timings.lookup - timings.socket;
}
timings.phases.tcp = timings.connect - timings.lookup;
if (uploadFinished && !timings.upload) {
onUpload();
}
});
});
request.once('finish', () => {
uploadFinished = true;
if (timings.connect) {
onUpload();
}
});
request.once('response', response => {
timings.response = Date.now();
timings.phases.firstByte = timings.response - timings.upload;
handleError(response);
response.once('end', () => {
timings.end = Date.now();
timings.phases.download = timings.end - timings.response;
timings.phases.total = timings.end - timings.start;
});
});
return timings;
};

View file

@@ -1,21 +0,0 @@
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@@ -1,16 +0,0 @@
# Installation
> `npm install --save @types/normalize-package-data`
# Summary
This package contains type definitions for normalize-package-data (https://github.com/npm/normalize-package-data#readme).
# Details
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/normalize-package-data
Additional Details
* Last updated: Sun, 07 Jan 2018 07:34:38 GMT
* Dependencies: none
* Global values: none
# Credits
These definitions were written by Jeff Dickey <https://github.com/jdxcode>.

View file

@@ -1,46 +0,0 @@
// Type definitions for normalize-package-data 2.4
// Project: https://github.com/npm/normalize-package-data#readme
// Definitions by: Jeff Dickey <https://github.com/jdxcode>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
export = normalize;
declare function normalize(data: normalize.Input, warn?: normalize.WarnFn, strict?: boolean): void;
declare function normalize(data: normalize.Input, strict?: boolean): void;
declare namespace normalize {
type WarnFn = (msg: string) => void;
interface Input {[k: string]: any; }
interface Person {
name?: string;
email?: string;
url?: string;
}
interface Package {
[k: string]: any;
name: string;
version: string;
files?: string[];
bin?: {[k: string]: string };
man?: string[];
keywords?: string[];
author?: Person;
maintainers?: Person[];
contributors?: Person[];
bundleDependencies?: {[name: string]: string; };
dependencies?: {[name: string]: string; };
devDependencies?: {[name: string]: string; };
optionalDependencies?: {[name: string]: string; };
description?: string;
engines?: {[type: string]: string };
license?: string;
repository?: { type: string, url: string };
bugs?: { url: string, email?: string } | { url?: string, email: string };
homepage?: string;
scripts?: {[k: string]: string};
readme: string;
_id: string;
}
}

View file

@@ -1,22 +0,0 @@
{
"name": "@types/normalize-package-data",
"version": "2.4.0",
"description": "TypeScript definitions for normalize-package-data",
"license": "MIT",
"contributors": [
{
"name": "Jeff Dickey",
"url": "https://github.com/jdxcode",
"githubUsername": "jdxcode"
}
],
"main": "",
"repository": {
"type": "git",
"url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"dependencies": {},
"typesPublisherContentHash": "5d2101e9e55c73e1d649a6c311e0d40bdfaa25bb06bb75ea6f3bb0d149c1303b",
"typeScriptVersion": "2.0"
}

58
node_modules/ansi-align/CHANGELOG.md generated vendored
View file

@@ -1,58 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
### [3.0.1](https://github.com/nexdrew/ansi-align/compare/v3.0.0...v3.0.1) (2021-09-27)
### Bug Fixes
* **package:** update string-width to version 4.1.0 ([#52](https://github.com/nexdrew/ansi-align/issues/52)) ([ab5b733](https://github.com/nexdrew/ansi-align/commit/ab5b733b1c30eef87b75e15459f2216db28d7ed3))
<a name="3.0.0"></a>
# [3.0.0](https://github.com/nexdrew/ansi-align/compare/v2.0.0...v3.0.0) (2018-12-17)
### Bug Fixes
* **package:** update string-width to version 3.0.0 ([#50](https://github.com/nexdrew/ansi-align/issues/50)) ([67f0d8f](https://github.com/nexdrew/ansi-align/commit/67f0d8f))
### BREAKING CHANGES
* **package:** Node 4 no longer supported, please update to Node 6+ or use ansi-align@2.0.0
<a name="2.0.0"></a>
# [2.0.0](https://github.com/nexdrew/ansi-align/compare/v1.1.0...v2.0.0) (2017-05-01)
### Features
* ES2015ify, dropping support for Node <4 ([#30](https://github.com/nexdrew/ansi-align/issues/30)) ([7b43f48](https://github.com/nexdrew/ansi-align/commit/7b43f48))
### BREAKING CHANGES
* Node 0.10 or 0.12 no longer supported, please update to Node 4+ or use ansi-align@1.1.0
<a name="1.1.0"></a>
# [1.1.0](https://github.com/nexdrew/ansi-align/compare/v1.0.0...v1.1.0) (2016-06-06)
### Features
* support left-alignment as no-op ([#3](https://github.com/nexdrew/ansi-align/issues/3)) ([e581db6](https://github.com/nexdrew/ansi-align/commit/e581db6))
<a name="1.0.0"></a>
# 1.0.0 (2016-04-30)
### Features
* initial commit ([1914d90](https://github.com/nexdrew/ansi-align/commit/1914d90))

13
node_modules/ansi-align/LICENSE generated vendored
View file

@@ -1,13 +0,0 @@
Copyright (c) 2016, Contributors
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

80
node_modules/ansi-align/README.md generated vendored
View file

@@ -1,80 +0,0 @@
# ansi-align
> align-text with ANSI support for CLIs
[![Build Status](https://travis-ci.org/nexdrew/ansi-align.svg?branch=master)](https://travis-ci.org/nexdrew/ansi-align)
[![Coverage Status](https://coveralls.io/repos/github/nexdrew/ansi-align/badge.svg?branch=master)](https://coveralls.io/github/nexdrew/ansi-align?branch=master)
[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version)
[![Greenkeeper badge](https://badges.greenkeeper.io/nexdrew/ansi-align.svg)](https://greenkeeper.io/)
Easily center- or right-align a block of text, carefully ignoring ANSI escape codes.
E.g. turn this:
<img width="281" alt="ansi text block no alignment :(" src="https://cloud.githubusercontent.com/assets/1929625/14937509/7c3076dc-0ed7-11e6-8c16-4f6a4ccc8346.png">
Into this:
<img width="278" alt="ansi text block center aligned!" src="https://cloud.githubusercontent.com/assets/1929625/14937510/7c3ca0b0-0ed7-11e6-8f0a-541ca39b6e0a.png">
## Install
```sh
npm install --save ansi-align
```
```js
var ansiAlign = require('ansi-align')
```
## API
### `ansiAlign(text, [opts])`
Align the given text per the line with the greatest [`string-width`](https://github.com/sindresorhus/string-width), returning a new string (or array).
#### Arguments
- `text`: required, string or array
The text to align. If a string is given, it will be split using either the `opts.split` value or `'\n'` by default. If an array is given, a different array of modified strings will be returned.
- `opts`: optional, object
Options to change behavior, see below.
#### Options
- `opts.align`: string, default `'center'`
The alignment mode. Use `'center'` for center-alignment, `'right'` for right-alignment, or `'left'` for left-alignment. Note that the given `text` is assumed to be left-aligned already, so specifying `align: 'left'` just returns the `text` as is (no-op).
- `opts.split`: string or RegExp, default `'\n'`
The separator to use when splitting the text. Only used if text is given as a string.
- `opts.pad`: string, default `' '`
The value used to left-pad (prepend to) lines of lesser width. Will be repeated as necessary to adjust alignment to the line with the greatest width.
### `ansiAlign.center(text)`
Alias for `ansiAlign(text, { align: 'center' })`.
### `ansiAlign.right(text)`
Alias for `ansiAlign(text, { align: 'right' })`.
### `ansiAlign.left(text)`
Alias for `ansiAlign(text, { align: 'left' })`, which is a no-op.
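A minimal illustrative sketch of the API above (expected return values shown in comments):
```js
const ansiAlign = require('ansi-align')
ansiAlign('a\nbb\nccc')
//=> ' a\nbb\nccc'
ansiAlign.right('a\nbb\nccc')
//=> '  a\n bb\nccc'
```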
## Similar Packages
- [`center-align`](https://github.com/jonschlinkert/center-align): Very close to this package, except it doesn't support ANSI codes.
- [`left-pad`](https://github.com/camwest/left-pad): Great for left-padding but does not support center alignment or ANSI codes.
- Pretty much anything by the [chalk](https://github.com/chalk) team
## License
ISC © Contributors

61
node_modules/ansi-align/index.js generated vendored
View file

@@ -1,61 +0,0 @@
'use strict'
const stringWidth = require('string-width')
function ansiAlign (text, opts) {
if (!text) return text
opts = opts || {}
const align = opts.align || 'center'
// short-circuit `align: 'left'` as no-op
if (align === 'left') return text
const split = opts.split || '\n'
const pad = opts.pad || ' '
const widthDiffFn = align !== 'right' ? halfDiff : fullDiff
let returnString = false
if (!Array.isArray(text)) {
returnString = true
text = String(text).split(split)
}
let width
let maxWidth = 0
text = text.map(function (str) {
str = String(str)
width = stringWidth(str)
maxWidth = Math.max(width, maxWidth)
return {
str,
width
}
}).map(function (obj) {
return new Array(widthDiffFn(maxWidth, obj.width) + 1).join(pad) + obj.str
})
return returnString ? text.join(split) : text
}
ansiAlign.left = function left (text) {
return ansiAlign(text, { align: 'left' })
}
ansiAlign.center = function center (text) {
return ansiAlign(text, { align: 'center' })
}
ansiAlign.right = function right (text) {
return ansiAlign(text, { align: 'right' })
}
module.exports = ansiAlign
function halfDiff (maxWidth, curWidth) {
return Math.floor((maxWidth - curWidth) / 2)
}
function fullDiff (maxWidth, curWidth) {
return maxWidth - curWidth
}

43
node_modules/ansi-align/package.json generated vendored
View file

@@ -1,43 +0,0 @@
{
"name": "ansi-align",
"version": "3.0.1",
"description": "align-text with ANSI support for CLIs",
"main": "index.js",
"scripts": {
"pretest": "standard",
"test": "nyc ava",
"coverage": "nyc report --reporter=text-lcov | coveralls",
"release": "standard-version"
},
"files": [
"index.js"
],
"repository": {
"type": "git",
"url": "git+https://github.com/nexdrew/ansi-align.git"
},
"keywords": [
"ansi",
"align",
"cli",
"center",
"pad"
],
"author": "nexdrew",
"license": "ISC",
"bugs": {
"url": "https://github.com/nexdrew/ansi-align/issues"
},
"homepage": "https://github.com/nexdrew/ansi-align#readme",
"dependencies": {
"string-width": "^4.1.0"
},
"devDependencies": {
"ava": "^2.0.0",
"chalk": "^2.4.2",
"coveralls": "^3.0.3",
"nyc": "^14.0.0",
"standard": "^14.0.0",
"standard-version": "^7.0.0"
}
}

11
node_modules/ava/cli.js generated vendored
View file

@@ -1,11 +0,0 @@
#!/usr/bin/env node
'use strict';
const debug = require('debug')('ava');
const importLocal = require('import-local');
// Prefer the local installation of AVA
if (importLocal(__filename)) {
debug('Using local install of AVA');
} else {
require('./lib/cli').run();
}

4
node_modules/ava/entrypoints/cli.mjs generated vendored Executable file
View file

@@ -0,0 +1,4 @@
#!/usr/bin/env node
import run from '../lib/cli.js';
run();

109
node_modules/ava/entrypoints/eslint-plugin-helper.cjs generated vendored Normal file
View file

@@ -0,0 +1,109 @@
'use strict';
const path = require('path');
const url = require('url');
const v8 = require('v8');
const {Worker} = require('worker_threads');
const {
classify,
hasExtension,
isHelperish,
matches,
normalizeFileForMatching,
normalizePatterns,
} = require('../lib/glob-helpers.cjs');
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
let data;
let sync;
let worker;
const resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
if (worker === undefined) {
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
data = new Uint8Array(dataBuffer);
const syncBuffer = new SharedArrayBuffer(4);
sync = new Int32Array(syncBuffer);
const filename = path.join(__dirname, '../lib/eslint-plugin-helper-worker.js');
worker = new Worker(url.pathToFileURL(filename), {
workerData: {
dataBuffer,
syncBuffer,
firstMessage: {projectDir, overrideExtensions, overrideFiles},
},
});
worker.unref();
} else {
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
}
const synchronize = Atomics.wait(sync, 0, 0, 10_000);
if (synchronize === 'timed-out') {
throw new Error('Timed out resolving AVA configuration');
}
const byteLength = Atomics.exchange(sync, 0, 0);
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
throw new Error('Globs are over 100 KiB and cannot be resolved');
}
const globsOrError = v8.deserialize(data.slice(0, byteLength));
if (globsOrError instanceof Error) {
throw globsOrError;
}
return globsOrError;
};
const helperCache = new Map();
function load(projectDir, overrides) {
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
if (helperCache.has(cacheKey)) {
return helperCache.get(cacheKey);
}
let helperPatterns = [];
if (overrides && overrides.helpers !== undefined) {
if (!Array.isArray(overrides.helpers) || overrides.helpers.length === 0) {
throw new Error('The helpers override must be an array containing glob patterns.');
}
helperPatterns = normalizePatterns(overrides.helpers);
}
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
const classifyForESLint = file => {
const {isTest} = classify(file, globs);
let isHelper = false;
if (!isTest && hasExtension(globs.extensions, file)) {
file = normalizeFileForMatching(projectDir, file);
isHelper = isHelperish(file) || (helperPatterns.length > 0 && matches(file, helperPatterns));
}
return {isHelper, isTest};
};
const helper = Object.freeze({
classifyFile: classifyForESLint,
classifyImport: importPath => {
if (hasExtension(globs.extensions, importPath)) {
// The importPath has one of the test file extensions: we can classify
// it directly.
return classifyForESLint(importPath);
}
// Add the first extension. If multiple extensions are available, assume
// patterns are not biased to any particular extension.
return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
},
});
helperCache.set(cacheKey, helper);
return helper;
}
exports.load = load;

2
node_modules/ava/entrypoints/main.cjs generated vendored Normal file
View file

@@ -0,0 +1,2 @@
'use strict';
module.exports = require('../lib/worker/main.cjs');

1
node_modules/ava/entrypoints/main.mjs generated vendored Normal file
View file

@@ -0,0 +1 @@
export {default} from '../lib/worker/main.cjs';

2
node_modules/ava/entrypoints/plugin.cjs generated vendored Normal file
View file

@@ -0,0 +1,2 @@
'use strict';
module.exports = require('../lib/worker/plugin.cjs');

4
node_modules/ava/entrypoints/plugin.mjs generated vendored Normal file
View file

@@ -0,0 +1,4 @@
import * as plugin from '../lib/worker/plugin.cjs';
const {registerSharedWorker} = plugin;
export {registerSharedWorker};

View file

@@ -1,201 +0,0 @@
'use strict';
let isMainThread = true;
let supportsWorkers = false;
try {
({isMainThread} = require('worker_threads'));
supportsWorkers = true;
} catch {}
const {classify, hasExtension, isHelperish, matches, normalizeFileForMatching, normalizeGlobs, normalizePatterns} = require('./lib/globs');
let resolveGlobs;
let resolveGlobsSync;
if (!supportsWorkers || !isMainThread) {
const normalizeExtensions = require('./lib/extensions');
const {loadConfig, loadConfigSync} = require('./lib/load-config');
const providerManager = require('./lib/provider-manager');
const configCache = new Map();
const collectProviders = ({conf, projectDir}) => {
const providers = [];
if (Reflect.has(conf, 'babel')) {
const {level, main} = providerManager.babel(projectDir);
providers.push({
level,
main: main({config: conf.babel}),
type: 'babel'
});
}
if (Reflect.has(conf, 'typescript')) {
const {level, main} = providerManager.typescript(projectDir);
providers.push({
level,
main: main({config: conf.typescript}),
type: 'typescript'
});
}
return providers;
};
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
const extensions = overrideExtensions ?
normalizeExtensions(overrideExtensions) :
normalizeExtensions(conf.extensions, providers);
return {
cwd: projectDir,
...normalizeGlobs({
extensions,
files: overrideFiles ? overrideFiles : conf.files,
providers
})
};
};
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
if (!configCache.has(projectDir)) {
const conf = loadConfigSync({resolveFrom: projectDir});
const providers = collectProviders({conf, projectDir});
configCache.set(projectDir, {conf, providers});
}
const {conf, providers} = configCache.get(projectDir);
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
};
resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
if (!configCache.has(projectDir)) {
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(conf => { // eslint-disable-line promise/prefer-await-to-then
const providers = collectProviders({conf, projectDir});
return {conf, providers};
}));
}
const {conf, providers} = await configCache.get(projectDir);
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
};
}
if (supportsWorkers) {
const v8 = require('v8');
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
if (isMainThread) {
const {Worker} = require('worker_threads');
let data;
let sync;
let worker;
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
if (worker === undefined) {
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
data = new Uint8Array(dataBuffer);
const syncBuffer = new SharedArrayBuffer(4);
sync = new Int32Array(syncBuffer);
worker = new Worker(__filename, {
workerData: {
dataBuffer,
syncBuffer,
firstMessage: {projectDir, overrideExtensions, overrideFiles}
}
});
worker.unref();
} else {
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
}
Atomics.wait(sync, 0, 0);
const byteLength = Atomics.exchange(sync, 0, 0);
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
throw new Error('Globs are over 100 KiB and cannot be resolved');
}
const globsOrError = v8.deserialize(data.slice(0, byteLength));
if (globsOrError instanceof Error) {
throw globsOrError;
}
return globsOrError;
};
} else {
const {parentPort, workerData} = require('worker_threads');
const data = new Uint8Array(workerData.dataBuffer);
const sync = new Int32Array(workerData.syncBuffer);
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
let encoded;
try {
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
encoded = v8.serialize(globs);
} catch (error) {
encoded = v8.serialize(error);
}
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
Atomics.store(sync, 0, byteLength);
Atomics.notify(sync, 0);
};
parentPort.on('message', handleMessage);
handleMessage(workerData.firstMessage);
delete workerData.firstMessage;
}
}
const helperCache = new Map();
function load(projectDir, overrides) {
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
if (helperCache.has(cacheKey)) {
return helperCache.get(cacheKey);
}
let helperPatterns = [];
if (overrides && overrides.helpers !== undefined) {
if (!Array.isArray(overrides.helpers) || overrides.helpers.length === 0) {
throw new Error('The helpers override must be an array containing glob patterns.');
}
helperPatterns = normalizePatterns(overrides.helpers);
}
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
const classifyForESLint = file => {
const {isTest} = classify(file, globs);
let isHelper = false;
if (!isTest && hasExtension(globs.extensions, file)) {
file = normalizeFileForMatching(projectDir, file);
isHelper = isHelperish(file) || (helperPatterns.length > 0 && matches(file, helperPatterns));
}
return {isHelper, isTest};
};
const helper = Object.freeze({
classifyFile: classifyForESLint,
classifyImport: importPath => {
if (hasExtension(globs.extensions, importPath)) {
// The importPath has one of the test file extensions: we can classify
// it directly.
return classifyForESLint(importPath);
}
// Add the first extension. If multiple extensions are available, assume
// patterns are not biased to any particular extension.
return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
}
});
helperCache.set(cacheKey, helper);
return helper;
}
exports.load = load;

822
node_modules/ava/index.d.ts generated vendored
View file

@@ -1,822 +1,12 @@
export interface Subscribable {
subscribe(observer: {
error(err: any): void;
complete(): void;
}): void;
}
import type {TestFn} from './types/test-fn';
export type Constructor = (new (...args: any[]) => any);
/** Specify one or more expectations the thrown error must satisfy. */
export type ThrowsExpectation = {
/** The thrown error must have a code that equals the given string or number. */
code?: string | number;
/** The thrown error must be an instance of this constructor. */
instanceOf?: Constructor;
/** The thrown error must be strictly equal to this value. */
is?: Error;
/** The thrown error must have a message that equals the given string, or matches the regular expression. */
message?: string | RegExp;
/** The thrown error must have a name that equals the given string. */
name?: string;
};
export type CommitDiscardOptions = {
/**
* Whether the logs should be included in those of the parent test.
*/
retainLogs?: boolean;
};
/** Options that can be passed to the `t.snapshot()` assertion. */
export type SnapshotOptions = {
/** If provided and not an empty string, used to select the snapshot to compare the `expected` value against. */
id?: string;
};
export interface Assertions {
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
assert: AssertAssertion;
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
deepEqual: DeepEqualAssertion;
/** Assert that `actual` is like `expected`. */
like: LikeAssertion;
/** Fail the test. */
fail: FailAssertion;
/** Assert that `actual` is strictly false. */
false: FalseAssertion;
/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
falsy: FalsyAssertion;
/**
* Assert that `actual` is [the same
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
*/
is: IsAssertion;
/**
* Assert that `actual` is not [the same
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
*/
not: NotAssertion;
/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
notDeepEqual: NotDeepEqualAssertion;
/** Assert that `string` does not match the regular expression. */
notRegex: NotRegexAssertion;
/** Assert that the function does not throw. */
notThrows: NotThrowsAssertion;
/** Assert that the async function does not throw, or that the promise does not reject. Must be awaited. */
notThrowsAsync: NotThrowsAsyncAssertion;
/** Count a passing assertion. */
pass: PassAssertion;
/** Assert that `string` matches the regular expression. */
regex: RegexAssertion;
/**
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
* necessary record a new snapshot.
*/
snapshot: SnapshotAssertion;
/**
* Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
*/
throws: ThrowsAssertion;
/**
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error), or the promise rejects
* with one. If so, returns a promise for the error value, which must be awaited.
*/
throwsAsync: ThrowsAsyncAssertion;
/** Assert that `actual` is strictly true. */
true: TrueAssertion;
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
truthy: TruthyAssertion;
}
export interface AssertAssertion {
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
(actual: any, message?: string): void;
/** Skip this assertion. */
skip(actual: any, message?: string): void;
}
export interface DeepEqualAssertion {
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
/** Skip this assertion. */
skip(actual: any, expected: any, message?: string): void;
}
export interface LikeAssertion {
/** Assert that `value` is like `selector`. */
(value: any, selector: Record<string, any>, message?: string): void;
/** Skip this assertion. */
skip(value: any, selector: any, message?: string): void;
}
export interface FailAssertion {
/** Fail the test. */
(message?: string): void;
/** Skip this assertion. */
skip(message?: string): void;
}
export interface FalseAssertion {
/** Assert that `actual` is strictly false. */
(actual: any, message?: string): void;
/** Skip this assertion. */
skip(actual: any, message?: string): void;
}
export interface FalsyAssertion {
/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
(actual: any, message?: string): void;
/** Skip this assertion. */
skip(actual: any, message?: string): void;
}
export interface IsAssertion {
/**
* Assert that `actual` is [the same
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
*/
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
/** Skip this assertion. */
skip(actual: any, expected: any, message?: string): void;
}
export interface NotAssertion {
/**
* Assert that `actual` is not [the same
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
*/
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
/** Skip this assertion. */
skip(actual: any, expected: any, message?: string): void;
}
export interface NotDeepEqualAssertion {
/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
/** Skip this assertion. */
skip(actual: any, expected: any, message?: string): void;
}
export interface NotRegexAssertion {
/** Assert that `string` does not match the regular expression. */
(string: string, regex: RegExp, message?: string): void;
/** Skip this assertion. */
skip(string: string, regex: RegExp, message?: string): void;
}
export interface NotThrowsAssertion {
/** Assert that the function does not throw. */
(fn: () => any, message?: string): void;
/** Skip this assertion. */
skip(fn: () => any, message?: string): void;
}
export interface NotThrowsAsyncAssertion {
/** Assert that the async function does not throw. You must await the result. */
(fn: () => PromiseLike<any>, message?: string): Promise<void>;
/** Assert that the promise does not reject. You must await the result. */
(promise: PromiseLike<any>, message?: string): Promise<void>;
/** Skip this assertion. */
skip(nonThrower: any, message?: string): void;
}
export interface PassAssertion {
/** Count a passing assertion. */
(message?: string): void;
/** Skip this assertion. */
skip(message?: string): void;
}
export interface RegexAssertion {
/** Assert that `string` matches the regular expression. */
(string: string, regex: RegExp, message?: string): void;
/** Skip this assertion. */
skip(string: string, regex: RegExp, message?: string): void;
}
export interface SnapshotAssertion {
/**
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
* necessary record a new snapshot.
*/
(expected: any, message?: string): void;
/**
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details) (selected
* through `options.id` if provided), or if necessary record a new snapshot.
*/
(expected: any, options: SnapshotOptions, message?: string): void;
/** Skip this assertion. */
skip(expected: any, message?: string): void;
/** Skip this assertion. */
skip(expected: any, options: SnapshotOptions, message?: string): void;
}
export interface ThrowsAssertion {
/**
* Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
* The error must satisfy all expectations.
*/
<ThrownError extends Error>(fn: () => any, expectations?: ThrowsExpectation | null, message?: string): ThrownError;
/** Skip this assertion. */
skip(fn: () => any, expectations?: any, message?: string): void;
}
export interface ThrowsAsyncAssertion {
/**
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
* value. You must await the result.
*/
<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
/**
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
* value. You must await the result. The error must satisfy all expectations.
*/
<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
/**
* Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
* rejection reason. You must await the result.
*/
<ThrownError extends Error>(promise: PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
/**
* Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
* rejection reason. You must await the result. The error must satisfy all expectations.
*/
<ThrownError extends Error>(promise: PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
/** Skip this assertion. */
skip(thrower: any, expectations?: any, message?: string): void;
}
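/*
 * Illustrative usage sketch (editorial example, not part of the shipped typings).
 * The expectations object constrains which errors satisfy the assertion, and
 * the error is returned for further checks:
 *
 *   test('rejects with a TypeError', async t => {
 *     const error = await t.throwsAsync(
 *       Promise.reject(new TypeError('not a number')),
 *       {instanceOf: TypeError, message: 'not a number'},
 *     );
 *     t.is(error.message, 'not a number');
 *   });
 */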
export interface TrueAssertion {
/** Assert that `actual` is strictly true. */
(actual: any, message?: string): void;
/** Skip this assertion. */
skip(actual: any, message?: string): void;
}
export interface TruthyAssertion {
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
(actual: any, message?: string): void;
/** Skip this assertion. */
skip(actual: any, message?: string): void;
}
/** The `t` value passed to test & hook implementations. */
export interface ExecutionContext<Context = unknown> extends Assertions {
/** Test context, shared with hooks. */
context: Context;
/** Title of the test or hook. */
readonly title: string;
/** Whether the test has passed. Only accurate in afterEach hooks. */
readonly passed: boolean;
log: LogFn;
plan: PlanFn;
teardown: TeardownFn;
timeout: TimeoutFn;
try: TryFn<Context>;
}
export interface LogFn {
/** Log one or more values. */
(...values: any[]): void;
/** Skip logging. */
skip(...values: any[]): void;
}
export interface PlanFn {
/**
* Plan how many assertions there are in the test. The test will fail if the actual assertion count doesn't match the
* number of planned assertions. See [assertion planning](https://github.com/avajs/ava#assertion-planning).
*/
(count: number): void;
/** Don't plan assertions. */
skip(count: number): void;
}
export interface TimeoutFn {
/**
* Set a timeout for the test, in milliseconds. The test will fail if the timeout is exceeded.
* The timeout is reset each time an assertion is made.
*/
(ms: number, message?: string): void;
}
export interface TeardownFn {
/** Declare a function to be run after the test has ended. */
(fn: () => void): void;
}
export interface TryFn<Context = unknown> {
/**
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
* the test will fail. A macro may be provided. The title may help distinguish attempts from
* one another.
*/
<Args extends any[]>(title: string, fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
/**
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
* the test will fail. A macro may be provided. The title may help distinguish attempts from
* one another.
*/
<Args extends any[]>(title: string, fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
/**
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
* the test will fail. A macro may be provided.
*/
<Args extends any[]>(fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
/**
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
* the test will fail. A macro may be provided.
*/
<Args extends any[]>(fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
}
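/*
 * Illustrative usage sketch (editorial example, not part of the shipped typings).
 * An attempt must be committed or discarded; committing a failed attempt fails
 * the surrounding test, while discarding it does not:
 *
 *   test('retries a flaky check', async t => {
 *     const firstAttempt = await t.try('first attempt', tt => {
 *       tt.is(2 + 2, 4);
 *     });
 *     if (firstAttempt.passed) {
 *       firstAttempt.commit();
 *     } else {
 *       t.log(firstAttempt.errors);
 *       firstAttempt.discard();
 *     }
 *   });
 */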
export interface AssertionError extends Error {}
export interface TryResult {
/**
* Title of the attempt, helping you tell attempts apart.
*/
title: string;
/**
* Indicates whether all assertions passed, or at least one failed.
*/
passed: boolean;
/**
* Errors raised for each failed assertion.
*/
errors: AssertionError[];
/**
* Logs created during the attempt using `t.log()`. Contains formatted values.
*/
logs: string[];
/**
* Commit the attempt. Counts as one assertion for the plan count. If the
* attempt failed, calling this will also cause your test to fail.
*/
commit(options?: CommitDiscardOptions): void;
/**
* Discard the attempt.
*/
discard(options?: CommitDiscardOptions): void;
}
/** The `t` value passed to implementations for tests & hooks declared with the `.cb` modifier. */
export interface CbExecutionContext<Context = unknown> extends ExecutionContext<Context> {
/**
* End the test. If `error` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy) the test or hook
* will fail.
*/
end(error?: any): void;
}
export type ImplementationResult = PromiseLike<void> | Subscribable | void;
export type Implementation<Context = unknown> = (t: ExecutionContext<Context>) => ImplementationResult;
export type CbImplementation<Context = unknown> = (t: CbExecutionContext<Context>) => ImplementationResult;
/** A reusable test or hook implementation. */
export type UntitledMacro<Args extends any[], Context = unknown> = (t: ExecutionContext<Context>, ...args: Args) => ImplementationResult;
/** A reusable test or hook implementation. */
export type Macro<Args extends any[], Context = unknown> = UntitledMacro<Args, Context> & {
/**
* Implement this function to generate a test (or hook) title whenever this macro is used. `providedTitle` contains
* the title provided when the test or hook was declared. Also receives the remaining test arguments.
*/
title?: (providedTitle: string | undefined, ...args: Args) => string;
};
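/*
 * Illustrative macro sketch (editorial example, not part of the shipped typings).
 * The optional `title` function receives the provided title plus the remaining
 * arguments and returns the generated test title:
 *
 *   const isEven: Macro<[number]> = (t, value) => t.is(value % 2, 0);
 *   isEven.title = (providedTitle = '', value) => `${providedTitle} ${value} is even`.trim();
 *
 *   test(isEven, 4);            // title: "4 is even"
 *   test('sanity:', isEven, 8); // title: "sanity: 8 is even"
 */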
export type EitherMacro<Args extends any[], Context> = Macro<Args, Context> | UntitledMacro<Args, Context>;
/** Alias for a single macro, or an array of macros. */
export type OneOrMoreMacros<Args extends any[], Context> = EitherMacro<Args, Context> | [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>];
/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
export type UntitledCbMacro<Args extends any[], Context = unknown> = (t: CbExecutionContext<Context>, ...args: Args) => ImplementationResult;
/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
export type CbMacro<Args extends any[], Context = unknown> = UntitledCbMacro<Args, Context> & {
title?: (providedTitle: string | undefined, ...args: Args) => string;
};
export type EitherCbMacro<Args extends any[], Context> = CbMacro<Args, Context> | UntitledCbMacro<Args, Context>;
/** Alias for a single macro, or an array of macros, used for tests & hooks declared with the `.cb` modifier. */
export type OneOrMoreCbMacros<Args extends any[], Context> = EitherCbMacro<Args, Context> | [EitherCbMacro<Args, Context>, ...Array<EitherCbMacro<Args, Context>>];
export interface TestInterface<Context = unknown> {
/** Declare a concurrent test. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests have passed. */
after: AfterInterface<Context>;
/** Declare a hook that is run after each passing test. */
afterEach: AfterInterface<Context>;
/** Declare a hook that is run once, before all tests. */
before: BeforeInterface<Context>;
/** Declare a hook that is run before each test. */
beforeEach: BeforeInterface<Context>;
/** Declare a test that must call `t.end()` when it's done. */
cb: CbInterface<Context>;
/** Declare a test that is expected to fail. */
failing: FailingInterface<Context>;
/** Declare tests and hooks that are run serially. */
serial: SerialInterface<Context>;
only: OnlyInterface<Context>;
skip: SkipInterface<Context>;
todo: TodoDeclaration;
meta: MetaInterface;
}
export interface AfterInterface<Context = unknown> {
/** Declare a hook that is run once, after all tests have passed. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests have passed. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests have passed. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests have passed. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests are done. */
always: AlwaysInterface<Context>;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface AlwaysInterface<Context = unknown> {
/** Declare a hook that is run once, after all tests are done. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests are done. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests are done. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests are done. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface BeforeInterface<Context = unknown> {
/** Declare a hook that is run once, before all tests. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, before all tests. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, before all tests. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, before all tests. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface CbInterface<Context = unknown> {
/** Declare a test that must call `t.end()` when it's done. */
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
* The macro is responsible for generating a unique test title.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Declare a test that is expected to fail. */
failing: CbFailingInterface<Context>;
only: CbOnlyInterface<Context>;
skip: CbSkipInterface<Context>;
}
export interface CbFailingInterface<Context = unknown> {
/** Declare a test that must call `t.end()` when it's done. The test is expected to fail. */
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro. The test is expected to fail.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* The test is expected to fail.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
only: CbOnlyInterface<Context>;
skip: CbSkipInterface<Context>;
}
export interface CbOnlyInterface<Context = unknown> {
/**
* Declare a test that must call `t.end()` when it's done. Only this test and others declared with `.only()` are run.
*/
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro. Only this test and others declared with `.only()` are run.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro. Only this test and others declared with `.only()` are run.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
export interface CbSkipInterface<Context = unknown> {
/** Skip this test. */
(title: string, implementation: CbImplementation<Context>): void;
/** Skip this test. */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Skip this test. */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
export interface FailingInterface<Context = unknown> {
/** Declare a concurrent test. The test is expected to fail. */
(title: string, implementation: Implementation<Context>): void;
/**
* Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro.
* The test is expected to fail.
*/
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
* Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title.
* The test is expected to fail.
*/
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
only: OnlyInterface<Context>;
skip: SkipInterface<Context>;
}
export interface HookCbInterface<Context = unknown> {
/** Declare a hook that must call `t.end()` when it's done. */
(implementation: CbImplementation<Context>): void;
/** Declare a hook that must call `t.end()` when it's done. */
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
skip: HookCbSkipInterface<Context>;
}
export interface HookCbSkipInterface<Context = unknown> {
/** Skip this hook. */
(implementation: CbImplementation<Context>): void;
/** Skip this hook. */
(title: string, implementation: CbImplementation<Context>): void;
/** Skip this hook. */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Skip this hook. */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
export interface HookSkipInterface<Context = unknown> {
/** Skip this hook. */
(implementation: Implementation<Context>): void;
/** Skip this hook. */
(title: string, implementation: Implementation<Context>): void;
/** Skip this hook. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Skip this hook. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
export interface OnlyInterface<Context = unknown> {
/** Declare a test. Only this test and others declared with `.only()` are run. */
(title: string, implementation: Implementation<Context>): void;
/**
* Declare a test that uses one or more macros. Additional arguments are passed to the macro.
* Only this test and others declared with `.only()` are run.
*/
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
* Declare a test that uses one or more macros. The macro is responsible for generating a unique test title.
* Only this test and others declared with `.only()` are run.
*/
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
export interface SerialInterface<Context = unknown> {
/** Declare a serial test. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a serial test that uses one or more macros. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
* Declare a serial test that uses one or more macros. The macro is responsible for generating a unique test title.
*/
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a serial hook that is run once, after all tests have passed. */
after: AfterInterface<Context>;
/** Declare a serial hook that is run after each passing test. */
afterEach: AfterInterface<Context>;
/** Declare a serial hook that is run once, before all tests. */
before: BeforeInterface<Context>;
/** Declare a serial hook that is run before each test. */
beforeEach: BeforeInterface<Context>;
/** Declare a serial test that must call `t.end()` when it's done. */
cb: CbInterface<Context>;
/** Declare a serial test that is expected to fail. */
failing: FailingInterface<Context>;
only: OnlyInterface<Context>;
skip: SkipInterface<Context>;
todo: TodoDeclaration;
}
export interface SkipInterface<Context = unknown> {
/** Skip this test. */
(title: string, implementation: Implementation<Context>): void;
/** Skip this test. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Skip this test. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
export interface TodoDeclaration {
/** Declare a test that should be implemented later. */
(title: string): void;
}
export interface MetaInterface {
/** Path to the test file being executed. */
file: string;
/** Directory where snapshots are stored. */
snapshotDirectory: string;
}
export * from './types/assertions';
export * from './types/try-fn';
export * from './types/test-fn';
export * from './types/subscribable';
/** Call to declare a test, or chain to declare hooks or test modifiers */
declare const test: TestInterface;
declare const test: TestFn;
/** Call to declare a test, or chain to declare hooks or test modifiers */
export default test;
/** Call to declare a hook that is run once, after all tests have passed, or chain to declare modifiers. */
export const after: AfterInterface;
/** Call to declare a hook that is run after each passing test, or chain to declare modifiers. */
export const afterEach: AfterInterface;
/** Call to declare a hook that is run once, before all tests, or chain to declare modifiers. */
export const before: BeforeInterface;
/** Call to declare a hook that is run before each test, or chain to declare modifiers. */
export const beforeEach: BeforeInterface;
/** Call to declare a test that must invoke `t.end()` when it's done, or chain to declare modifiers. */
export const cb: CbInterface;
/** Call to declare a test that is expected to fail, or chain to declare modifiers. */
export const failing: FailingInterface;
/** Call to declare a test that is run exclusively, along with other tests declared with `.only()`. */
export const only: OnlyInterface;
/** Call to declare a serial test, or chain to declare serial hooks or test modifiers. */
export const serial: SerialInterface;
/** Skip this test. */
export const skip: SkipInterface;
/** Declare a test that should be implemented later. */
export const todo: TodoDeclaration;
/** Meta data associated with the current process. */
export const meta: MetaInterface;
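A minimal end-to-end sketch of how these exports are typically consumed (an editorial illustration, not part of the vendored typings; it assumes an ESM test file):
import test from 'ava';
test.before(t => {
  t.context = {answer: 42};
});
test.serial('context set in hooks is visible to tests', t => {
  t.plan(2);
  t.is(t.context.answer, 42);
  t.truthy(test.meta.file);
});
test.todo('cover the failure path');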

8
node_modules/ava/index.js generated vendored
View file

@ -1,8 +0,0 @@
'use strict';
// Ensure the same AVA install is loaded by the test file as by the test worker
if (process.env.AVA_PATH && process.env.AVA_PATH !== __dirname) {
module.exports = require(process.env.AVA_PATH);
} else {
module.exports = require('./lib/worker/main');
}

157
node_modules/ava/lib/api.js generated vendored
View file

@ -1,23 +1,25 @@
'use strict';
const fs = require('fs');
const path = require('path');
const os = require('os');
const commonPathPrefix = require('common-path-prefix');
const resolveCwd = require('resolve-cwd');
const debounce = require('lodash/debounce');
const arrify = require('arrify');
const ms = require('ms');
const chunkd = require('chunkd');
const Emittery = require('emittery');
const pMap = require('p-map');
const tempDir = require('temp-dir');
const globs = require('./globs');
const isCi = require('./is-ci');
const RunStatus = require('./run-status');
const fork = require('./fork');
const serializeError = require('./serialize-error');
const {getApplicableLineNumbers} = require('./line-numbers');
const sharedWorkers = require('./plugin-support/shared-workers');
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import process from 'node:process';
import arrify from 'arrify';
import chunkd from 'chunkd';
import commonPathPrefix from 'common-path-prefix';
import Emittery from 'emittery';
import ms from 'ms';
import pMap from 'p-map';
import resolveCwd from 'resolve-cwd';
import tempDir from 'temp-dir';
import fork from './fork.js';
import * as globs from './globs.js';
import isCi from './is-ci.js';
import {getApplicableLineNumbers} from './line-numbers.js';
import {observeWorkerProcess} from './plugin-support/shared-workers.js';
import RunStatus from './run-status.js';
import scheduler from './scheduler.js';
import serializeError from './serialize-error.js';
function resolveModules(modules) {
return arrify(modules).map(name => {
@ -40,7 +42,40 @@ function getFilePathPrefix(files) {
return commonPathPrefix(files);
}
class Api extends Emittery {
class TimeoutTrigger {
constructor(fn, waitMs = 0) {
this.fn = fn.bind(null);
this.ignoreUntil = 0;
this.waitMs = waitMs;
this.timer = undefined;
}
debounce() {
if (this.timer === undefined) {
this.timer = setTimeout(() => this.trigger(), this.waitMs);
} else {
this.timer.refresh();
}
}
discard() {
// N.B. this.timer is not cleared, so if debounce() is called after discard()
// the timer will not run again.
clearTimeout(this.timer);
}
ignoreFor(periodMs) {
this.ignoreUntil = Math.max(this.ignoreUntil, Date.now() + periodMs);
}
trigger() {
if (Date.now() >= this.ignoreUntil) {
this.fn();
}
}
}
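// Illustrative usage sketch (editorial note, not part of this file): debounce()
// arms or refreshes the timer, ignoreFor() swallows any trigger that fires within
// the given period, and discard() cancels it:
//
//   const trigger = new TimeoutTrigger(() => console.log('run went quiet'), 10_000);
//   trigger.debounce();          // start (or refresh) the 10s idle timer
//   trigger.ignoreFor(60_000);   // triggers within the next minute are ignored
//   trigger.discard();           // stop it; later debounce() calls won't re-arm it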
export default class Api extends Emittery {
constructor(options) {
super();
@ -55,7 +90,7 @@ class Api extends Emittery {
}
}
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) {
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) { // eslint-disable-line complexity
let setupOrGlobError;
const apiOptions = this.options;
@ -70,11 +105,11 @@ class Api extends Emittery {
let bailed = false;
const pendingWorkers = new Set();
const timedOutWorkerFiles = new Set();
let restartTimer;
let timeoutTrigger;
if (apiOptions.timeout && !apiOptions.debug) {
const timeout = ms(apiOptions.timeout);
restartTimer = debounce(() => {
timeoutTrigger = new TimeoutTrigger(() => {
// If failFast is active, prevent new test files from running after
// the current ones are exited.
if (failFast) {
@ -89,7 +124,7 @@ class Api extends Emittery {
}
}, timeout);
} else {
restartTimer = Object.assign(() => {}, {cancel() {}});
timeoutTrigger = new TimeoutTrigger(() => {});
}
this._interruptHandler = () => {
@ -102,7 +137,7 @@ class Api extends Emittery {
bailed = true;
// Make sure we don't run the timeout handler
restartTimer.cancel();
timeoutTrigger.discard();
runStatus.emitStateChange({type: 'interrupt'});
@ -111,6 +146,8 @@ class Api extends Emittery {
}
};
const {providers = []} = this.options;
let testFiles;
try {
testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
@ -118,7 +155,8 @@ class Api extends Emittery {
selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
cwd: this.options.projectDir,
filter: filter.map(({pattern}) => pattern),
testFiles
providers,
testFiles,
});
}
} catch (error) {
@ -126,6 +164,13 @@ class Api extends Emittery {
setupOrGlobError = error;
}
const selectionInsights = {
filter,
ignoredFilterPatternFiles: selectedFiles.ignoredFilterPatternFiles || [],
testFileCount: testFiles.length,
selectionCount: selectedFiles.length,
};
try {
if (this.options.parallelRuns) {
const {currentIndex, totalRuns} = this.options.parallelRuns;
@ -137,11 +182,13 @@ class Api extends Emittery {
const currentFileCount = selectedFiles.length;
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns});
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns}, selectionInsights);
} else {
runStatus = new RunStatus(selectedFiles.length, null);
runStatus = new RunStatus(selectedFiles.length, null, selectionInsights);
}
selectedFiles = scheduler.failingTestsFirst(selectedFiles, this._getLocalCacheDir(), this.options.cacheEnabled);
const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
await this.emit('run', {
@ -155,7 +202,7 @@ class Api extends Emittery {
previousFailures: runtimeOptions.previousFailures || 0,
runOnlyExclusive: runtimeOptions.runOnlyExclusive === true,
runVector: runtimeOptions.runVector || 0,
status: runStatus
status: runStatus,
});
if (setupOrGlobError) {
@ -169,9 +216,9 @@ class Api extends Emittery {
runStatus.on('stateChange', record => {
if (record.testFile && !timedOutWorkerFiles.has(record.testFile)) {
// Restart the timer whenever there is activity from workers that
// Debounce the timer whenever there is activity from workers that
// haven't already timed out.
restartTimer();
timeoutTrigger.debounce();
}
if (failFast && (record.type === 'hook-failed' || record.type === 'test-failed' || record.type === 'worker-failed')) {
@ -185,14 +232,16 @@ class Api extends Emittery {
}
});
const {providers = []} = this.options;
const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
const providerStates = [];
await Promise.all(providers.map(async ({type, main}) => {
const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
return state === null ? null : {type, state};
}))).filter(state => state !== null);
if (state !== null) {
providerStates.push({type, state});
}
}));
// Resolve the correct concurrency value.
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Infinity);
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Number.POSITIVE_INFINITY);
if (apiOptions.concurrency > 0) {
concurrency = apiOptions.concurrency;
}
@ -212,13 +261,15 @@ class Api extends Emittery {
}
const lineNumbers = getApplicableLineNumbers(globs.normalizeFileForMatching(apiOptions.projectDir, file), filter);
// Remove the `providers` field because providers cannot be transferred to the worker threads.
const {providers, ...forkOptions} = apiOptions;
const options = {
...apiOptions,
...forkOptions,
providerStates,
lineNumbers,
recordNewSnapshots: !isCi,
// If we're looking for matches, run every single test process in exclusive-only mode
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true,
};
if (runtimeOptions.updateSnapshots) {
@ -227,42 +278,52 @@ class Api extends Emittery {
}
const worker = fork(file, options, apiOptions.nodeArguments);
worker.onStateChange(data => {
if (data.type === 'test-timeout-configured' && !apiOptions.debug) {
timeoutTrigger.ignoreFor(data.period);
}
});
runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
deregisteredSharedWorkers.push(observeWorkerProcess(worker, runStatus));
pendingWorkers.add(worker);
worker.promise.then(() => {
pendingWorkers.delete(worker);
});
restartTimer();
timeoutTrigger.debounce();
await worker.promise;
}, {concurrency, stopOnError: false});
// Allow shared workers to clean up before the run ends.
await Promise.all(deregisteredSharedWorkers);
scheduler.storeFailedTestFiles(runStatus, this.options.cacheEnabled === false ? null : this._createCacheDir());
} catch (error) {
if (error && error.name === 'AggregateError') {
for (const err of error) {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, err)});
for (const error_ of error.errors) {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error_)});
}
} else {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error)});
}
}
restartTimer.cancel();
timeoutTrigger.discard();
return runStatus;
}
_getLocalCacheDir() {
return path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
}
_createCacheDir() {
if (this._cacheDir) {
return this._cacheDir;
}
const cacheDir = this.options.cacheEnabled === false ?
fs.mkdtempSync(`${tempDir}${path.sep}`) :
path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
const cacheDir = this.options.cacheEnabled === false
? fs.mkdtempSync(`${tempDir}${path.sep}`)
: this._getLocalCacheDir();
// Ensure cacheDir exists
fs.mkdirSync(cacheDir, {recursive: true});
@ -272,5 +333,3 @@ class Api extends Emittery {
return cacheDir;
}
}
module.exports = Api;

527
node_modules/ava/lib/assert.js generated vendored

File diff suppressed because it is too large

23
node_modules/ava/lib/chalk.js generated vendored
View file

@ -1,20 +1,15 @@
'use strict';
const chalk = require('chalk');
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
let ctx = null;
exports.get = () => {
if (!ctx) {
throw new Error('Chalk has not yet been configured');
}
let chalk = new Chalk(); // eslint-disable-line import/no-mutable-exports
return ctx;
};
export {chalk};
exports.set = options => {
if (ctx) {
let configured = false;
export function set(options) {
if (configured) {
throw new Error('Chalk has already been configured');
}
ctx = new chalk.Instance(options);
return ctx;
};
configured = true;
chalk = new Chalk(options);
}
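A small consumption sketch (editorial illustration, assuming the layout above): `chalk` is exported as a mutable live binding, so once `set()` has run every importer sees the configured instance, and a second `set()` call throws.
import {chalk, set} from './chalk.js';
set({level: 2});                 // configure once, before any output is produced
console.log(chalk.red('fail'));  // uses the configured Chalk instance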

230
node_modules/ava/lib/cli.js generated vendored
View file

@ -1,89 +1,112 @@
'use strict';
const path = require('path');
const del = require('del');
const updateNotifier = require('update-notifier');
const figures = require('figures');
const arrify = require('arrify');
const yargs = require('yargs');
const readPkg = require('read-pkg');
const isCi = require('./is-ci');
const {loadConfig} = require('./load-config');
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import arrify from 'arrify';
import ciParallelVars from 'ci-parallel-vars';
import del from 'del';
import figures from 'figures';
import yargs from 'yargs';
import {hideBin} from 'yargs/helpers'; // eslint-disable-line node/file-extension-in-import
import Api from './api.js';
import {chalk} from './chalk.js';
import validateEnvironmentVariables from './environment-variables.js';
import normalizeExtensions from './extensions.js';
import {normalizeGlobs, normalizePattern} from './globs.js';
import {controlFlow} from './ipc-flow-control.cjs';
import isCi from './is-ci.js';
import {splitPatternAndLineNumbers} from './line-numbers.js';
import {loadConfig} from './load-config.js';
import normalizeModuleTypes from './module-types.js';
import normalizeNodeArguments from './node-arguments.js';
import providerManager from './provider-manager.js';
import DefaultReporter from './reporters/default.js';
import TapReporter from './reporters/tap.js';
import Watcher from './watcher.js';
function exit(message) {
console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
console.error(`\n ${chalk.red(figures.cross)} ${message}`);
process.exit(1); // eslint-disable-line unicorn/no-process-exit
}
const coerceLastValue = value => {
return Array.isArray(value) ? value.pop() : value;
};
const coerceLastValue = value => Array.isArray(value) ? value.pop() : value;
const FLAGS = {
concurrency: {
alias: 'c',
coerce: coerceLastValue,
description: 'Max number of test files running at the same time (default: CPU cores)',
type: 'number'
type: 'number',
},
'fail-fast': {
coerce: coerceLastValue,
description: 'Stop after first test failure',
type: 'boolean'
type: 'boolean',
},
match: {
alias: 'm',
description: 'Only run tests with matching title (can be repeated)',
type: 'string'
type: 'string',
},
'no-worker-threads': {
coerce: coerceLastValue,
description: 'Don\'t use worker threads',
type: 'boolean',
},
'node-arguments': {
coerce: coerceLastValue,
description: 'Additional Node.js arguments for launching worker processes (specify as a single string)',
type: 'string'
type: 'string',
},
serial: {
alias: 's',
coerce: coerceLastValue,
description: 'Run tests serially',
type: 'boolean'
type: 'boolean',
},
tap: {
alias: 't',
coerce: coerceLastValue,
description: 'Generate TAP output',
type: 'boolean'
type: 'boolean',
},
timeout: {
alias: 'T',
coerce: coerceLastValue,
description: 'Set global timeout (milliseconds or human-readable, e.g. 10s, 2m)',
type: 'string'
type: 'string',
},
'update-snapshots': {
alias: 'u',
coerce: coerceLastValue,
description: 'Update snapshots',
type: 'boolean'
type: 'boolean',
},
verbose: {
alias: 'v',
coerce: coerceLastValue,
description: 'Enable verbose output',
type: 'boolean'
description: 'Enable verbose output (default)',
type: 'boolean',
},
watch: {
alias: 'w',
coerce: coerceLastValue,
description: 'Re-run tests when files change',
type: 'boolean'
}
type: 'boolean',
},
};
exports.run = async () => { // eslint-disable-line complexity
let conf = {};
let confError = null;
export default async function loadCli() { // eslint-disable-line complexity
let conf;
let confError;
try {
const {argv: {config: configFile}} = yargs.help(false);
const {argv: {config: configFile}} = yargs(hideBin(process.argv)).help(false);
conf = await loadConfig({configFile});
if (conf.configFile && path.basename(conf.configFile) !== path.relative(conf.projectDir, conf.configFile)) {
console.log(chalk.magenta(` ${figures.warning} Using configuration from ${conf.configFile}`));
}
} catch (error) {
confError = error;
}
@ -91,18 +114,24 @@ exports.run = async () => { // eslint-disable-line complexity
// Enter debug mode if the main process is being inspected. This assumes the
// worker processes are automatically inspected, too. It is not necessary to
// run AVA with the debug command, though it's allowed.
const activeInspector = require('inspector').url() !== undefined; // eslint-disable-line node/no-unsupported-features/node-builtins
let debug = activeInspector ?
{
let activeInspector = false;
try {
const {default: inspector} = await import('node:inspector'); // eslint-disable-line node/no-unsupported-features/es-syntax
activeInspector = inspector.url() !== undefined;
} catch {}
let debug = activeInspector
? {
active: true,
break: false,
files: [],
host: undefined,
port: undefined
port: undefined,
} : null;
let resetCache = false;
const {argv} = yargs
const {argv} = yargs(hideBin(process.argv))
.parserConfiguration({
'boolean-negation': true,
'camel-case-expansion': false,
@ -116,7 +145,7 @@ exports.run = async () => { // eslint-disable-line complexity
'set-placeholder-key': false,
'short-option-groups': true,
'strip-aliased': true,
'unknown-options-as-args': false
'unknown-options-as-args': false,
})
.usage('$0 [<pattern>...]')
.usage('$0 debug [<pattern>...]')
@ -124,16 +153,16 @@ exports.run = async () => { // eslint-disable-line complexity
.options({
color: {
description: 'Force color output',
type: 'boolean'
type: 'boolean',
},
config: {
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files'
}
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files',
},
})
.command('* [<pattern>...]', 'Run tests', yargs => yargs.options(FLAGS).positional('pattern', {
array: true,
describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
type: 'string'
describe: 'Select which test files to run. Leave empty if you want AVA to run all test files as per your configuration. Accepts glob patterns, directories that (recursively) contain test files, and file paths. Add a colon and specify line numbers of specific tests to run',
type: 'string',
}), argv => {
if (activeInspector) {
debug.files = argv.pattern || [];
@ -145,22 +174,22 @@ exports.run = async () => { // eslint-disable-line complexity
yargs => yargs.options(FLAGS).options({
break: {
description: 'Break before the test file is loaded',
type: 'boolean'
type: 'boolean',
},
host: {
default: '127.0.0.1',
description: 'Address or hostname through which you can connect to the inspector',
type: 'string'
type: 'string',
},
port: {
default: 9229,
description: 'Port on which you can connect to the inspector',
type: 'number'
}
type: 'number',
},
}).positional('pattern', {
demand: true,
describe: 'Glob patterns to select a single test file to debug. Add a colon and specify line numbers of specific tests to run',
type: 'string'
type: 'string',
}),
argv => {
debug = {
@ -168,12 +197,12 @@ exports.run = async () => { // eslint-disable-line complexity
break: argv.break === true,
files: argv.pattern,
host: argv.host,
port: argv.port
port: argv.port,
};
})
.command(
'reset-cache',
'Reset AVA’s compilation cache and exit',
'Delete any temporary files and state kept by AVA, then exit',
yargs => yargs,
() => {
resetCache = true;
@ -184,8 +213,14 @@ exports.run = async () => { // eslint-disable-line complexity
.help();
const combined = {...conf};
for (const flag of Object.keys(FLAGS)) {
if (Reflect.has(argv, flag)) {
if (flag === 'no-worker-threads' && Reflect.has(argv, 'worker-threads')) {
combined.workerThreads = argv['worker-threads'];
continue;
}
if (argv[flag] !== undefined) {
if (flag === 'fail-fast') {
combined.failFast = argv[flag];
} else if (flag === 'update-snapshots') {
@ -196,13 +231,15 @@ exports.run = async () => { // eslint-disable-line complexity
}
}
const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
const chalk = require('./chalk').set(chalkOptions);
if (combined.updateSnapshots && combined.match) {
exit('Snapshots cannot be updated when matching specific tests.');
const chalkOptions = {level: 0};
if (combined.color !== false) {
const {supportsColor: {level}} = await import('chalk'); // eslint-disable-line node/no-unsupported-features/es-syntax, unicorn/import-style
chalkOptions.level = level;
}
const {set: setChalk} = await import('./chalk.js'); // eslint-disable-line node/no-unsupported-features/es-syntax
setChalk(chalkOptions);
if (confError) {
if (confError.parent) {
exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
@ -211,23 +248,23 @@ exports.run = async () => { // eslint-disable-line complexity
}
}
updateNotifier({pkg: require('../package.json')}).notify();
const {nonSemVerExperiments: experiments, projectDir} = conf;
if (resetCache) {
const cacheDir = path.join(projectDir, 'node_modules', '.cache', 'ava');
try {
await del('*', {
cwd: cacheDir,
nodir: true
});
console.error(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
const deletedFilePaths = await del('*', {cwd: cacheDir});
if (deletedFilePaths.length === 0) {
console.log(`\n${chalk.green(figures.tick)} No cache files to remove`);
} else {
console.log(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
}
process.exit(0); // eslint-disable-line unicorn/no-process-exit
} catch (error) {
exit(`Error removing AVA cache files in ${cacheDir}\n\n${chalk.gray((error && error.stack) || error)}`);
}
return;
}
if (argv.watch) {
@ -266,6 +303,10 @@ exports.run = async () => { // eslint-disable-line complexity
console.log(chalk.magenta(` ${figures.warning} Experiments are enabled. These are unsupported and may change or be removed at any time.`));
}
if (Reflect.has(conf, 'babel')) {
exit('Built-in Babel support has been removed.');
}
if (Reflect.has(conf, 'compileEnhancements')) {
exit('Enhancement compilation must be configured in AVA’s Babel options.');
}
@ -278,22 +319,9 @@ exports.run = async () => { // eslint-disable-line complexity
exit('sources has been removed. Use ignoredByWatcher to provide glob patterns of files that the watcher should ignore.');
}
const ciParallelVars = require('ci-parallel-vars');
const Api = require('./api');
const DefaultReporter = require('./reporters/default');
const TapReporter = require('./reporters/tap');
const Watcher = require('./watcher');
const normalizeExtensions = require('./extensions');
const normalizeModuleTypes = require('./module-types');
const {normalizeGlobs, normalizePattern} = require('./globs');
const normalizeNodeArguments = require('./node-arguments');
const validateEnvironmentVariables = require('./environment-variables');
const {splitPatternAndLineNumbers} = require('./line-numbers');
const providerManager = require('./provider-manager');
let pkg;
try {
pkg = readPkg.sync({cwd: projectDir});
pkg = JSON.parse(fs.readFileSync(path.resolve(projectDir, 'package.json')));
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
@ -303,26 +331,13 @@ exports.run = async () => { // eslint-disable-line complexity
const {type: defaultModuleType = 'commonjs'} = pkg || {};
const providers = [];
if (Reflect.has(conf, 'babel')) {
try {
const {level, main} = providerManager.babel(projectDir);
providers.push({
level,
main: main({config: conf.babel}),
type: 'babel'
});
} catch (error) {
exit(error.message);
}
}
if (Reflect.has(conf, 'typescript')) {
try {
const {level, main} = providerManager.typescript(projectDir);
const {level, main} = await providerManager.typescript(projectDir);
providers.push({
level,
main: main({config: conf.typescript}),
type: 'typescript'
type: 'typescript',
});
} catch (error) {
exit(error.message);
@ -377,16 +392,14 @@ exports.run = async () => { // eslint-disable-line complexity
.map(pattern => splitPatternAndLineNumbers(pattern))
.map(({pattern, ...rest}) => ({
pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
...rest
...rest,
}));
if (combined.updateSnapshots && filter.some(condition => condition.lineNumbers !== null)) {
exit('Snapshots cannot be updated when selecting specific tests by their line number.');
}
const api = new Api({
cacheEnabled: combined.cache !== false,
chalkOptions,
concurrency: combined.concurrency || 0,
workerThreads: combined.workerThreads !== false,
debug,
environmentVariables,
experiments,
@ -406,38 +419,31 @@ exports.run = async () => { // eslint-disable-line complexity
snapshotDir: combined.snapshotDir ? path.resolve(projectDir, combined.snapshotDir) : null,
timeout: combined.timeout || '10s',
updateSnapshots: combined.updateSnapshots,
workerArgv: argv['--']
workerArgv: argv['--'],
});
const reporter = combined.tap && !combined.watch && debug === null ? new TapReporter({
extensions: globs.extensions,
projectDir,
reportStream: process.stdout,
stdStream: process.stderr
stdStream: process.stderr,
}) : new DefaultReporter({
extensions: globs.extensions,
projectDir,
reportStream: process.stdout,
stdStream: process.stderr,
watching: combined.watch,
verbose: debug !== null || combined.verbose || isCi || !process.stdout.isTTY
});
api.on('run', plan => {
reporter.startRun(plan);
if (process.env.AVA_EMIT_RUN_STATUS_OVER_IPC === 'I\'ll find a payphone baby / Take some time to talk to you') {
const {controlFlow} = require('./ipc-flow-control');
const bufferedSend = controlFlow(process);
if (process.versions.node >= '12.16.0') {
plan.status.on('stateChange', evt => {
bufferedSend(evt);
});
} else {
const v8 = require('v8');
plan.status.on('stateChange', evt => {
bufferedSend([...v8.serialize(evt)]);
});
}
plan.status.on('stateChange', evt => {
bufferedSend(evt);
});
}
plan.status.on('stateChange', evt => {
@ -455,7 +461,7 @@ exports.run = async () => { // eslint-disable-line complexity
globs,
projectDir,
providers,
reporter
reporter,
});
watcher.observeStdin(process.stdin);
} else {
@ -476,4 +482,4 @@ exports.run = async () => { // eslint-disable-line complexity
process.exitCode = runStatus.suggestExitCode({matching: match.length > 0});
reporter.endRun();
}
};
}

29
node_modules/ava/lib/code-excerpt.js generated vendored
View file

@ -1,14 +1,14 @@
'use strict';
const fs = require('fs');
const equalLength = require('equal-length');
const codeExcerpt = require('code-excerpt');
const truncate = require('cli-truncate');
const chalk = require('./chalk').get();
import fs from 'node:fs';
import truncate from 'cli-truncate';
import codeExcerpt from 'code-excerpt';
import {chalk} from './chalk.js';
const formatLineNumber = (lineNumber, maxLineNumber) =>
' '.repeat(Math.max(0, String(maxLineNumber).length - String(lineNumber).length)) + lineNumber;
module.exports = (source, options = {}) => {
export default function exceptCode(source, options = {}) {
if (!source.isWithinProject || source.isDependency) {
return null;
}
@ -18,7 +18,7 @@ module.exports = (source, options = {}) => {
let contents;
try {
contents = fs.readFileSync(file, 'utf8');
contents = fs.readFileSync(new URL(file), 'utf8');
} catch {
return null;
}
@ -30,25 +30,20 @@ module.exports = (source, options = {}) => {
const lines = excerpt.map(item => ({
line: item.line,
value: truncate(item.value, maxWidth - String(line).length - 5)
value: truncate(item.value, maxWidth - String(line).length - 5),
}));
const joinedLines = lines.map(line => line.value).join('\n');
const extendedLines = equalLength(joinedLines).split('\n');
const extendedWidth = Math.max(...lines.map(item => item.value.length));
return lines
.map((item, index) => ({
line: item.line,
value: extendedLines[index]
}))
.map(item => {
const isErrorSource = item.line === line;
const lineNumber = formatLineNumber(item.line, line) + ':';
const coloredLineNumber = isErrorSource ? lineNumber : chalk.grey(lineNumber);
const result = ` ${coloredLineNumber} ${item.value}`;
const result = ` ${coloredLineNumber} ${item.value.padEnd(extendedWidth)}`;
return isErrorSource ? chalk.bgRed(result) : result;
})
.join('\n');
};
}

node_modules/ava/lib/concordance-options.js generated vendored
View file

@ -1,37 +1,32 @@
'use strict';
const util = require('util'); // eslint-disable-line unicorn/import-style
const ansiStyles = require('ansi-styles');
const stripAnsi = require('strip-ansi');
const cloneDeepWith = require('lodash/cloneDeepWith');
const reactPlugin = require('@concordance/react');
const chalk = require('./chalk').get();
import {inspect} from 'node:util';
// Wrap Concordance's React plugin. Change the name to avoid collisions if in
// the future users can register plugins themselves.
const avaReactPlugin = {...reactPlugin, name: 'ava-plugin-react'};
const plugins = [avaReactPlugin];
import ansiStyles from 'ansi-styles';
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
import stripAnsi from 'strip-ansi';
const forceColor = new chalk.Instance({level: Math.max(chalk.level, 1)});
import {chalk} from './chalk.js';
const forceColor = new Chalk({level: Math.max(chalk.level, 1)});
const colorTheme = {
boolean: ansiStyles.yellow,
circular: forceColor.grey('[Circular]'),
date: {
invalid: forceColor.red('invalid'),
value: ansiStyles.blue
value: ansiStyles.blue,
},
diffGutters: {
actual: forceColor.red('-') + ' ',
expected: forceColor.green('+') + ' ',
padding: ' '
padding: ' ',
},
error: {
ctor: {open: ansiStyles.grey.open + '(', close: ')' + ansiStyles.grey.close},
name: ansiStyles.magenta
name: ansiStyles.magenta,
},
function: {
name: ansiStyles.blue,
stringTag: ansiStyles.magenta
stringTag: ansiStyles.magenta,
},
global: ansiStyles.magenta,
item: {after: forceColor.grey(',')},
@ -45,44 +40,16 @@ const colorTheme = {
closeBracket: forceColor.grey('}'),
ctor: ansiStyles.magenta,
stringTag: {open: ansiStyles.magenta.open + '@', close: ansiStyles.magenta.close},
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close}
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close},
},
property: {
after: forceColor.grey(','),
keyBracket: {open: forceColor.grey('['), close: forceColor.grey(']')},
valueFallback: forceColor.grey('…')
},
react: {
functionType: forceColor.grey('\u235F'),
openTag: {
start: forceColor.grey('<'),
end: forceColor.grey('>'),
selfClose: forceColor.grey('/'),
selfCloseVoid: ' ' + forceColor.grey('/')
},
closeTag: {
open: forceColor.grey('</'),
close: forceColor.grey('>')
},
tagName: ansiStyles.magenta,
attribute: {
separator: '=',
value: {
openBracket: forceColor.grey('{'),
closeBracket: forceColor.grey('}'),
string: {
line: {open: forceColor.blue('"'), close: forceColor.blue('"'), escapeQuote: '"'}
}
}
},
child: {
openBracket: forceColor.grey('{'),
closeBracket: forceColor.grey('}')
}
valueFallback: forceColor.grey('…'),
},
regexp: {
source: {open: ansiStyles.blue.open + '/', close: '/' + ansiStyles.blue.close},
flags: ansiStyles.yellow
flags: ansiStyles.yellow,
},
stats: {separator: forceColor.grey('---')},
string: {
@ -94,45 +61,42 @@ const colorTheme = {
diff: {
insert: {
open: ansiStyles.bgGreen.open + ansiStyles.black.open,
close: ansiStyles.black.close + ansiStyles.bgGreen.close
close: ansiStyles.black.close + ansiStyles.bgGreen.close,
},
delete: {
open: ansiStyles.bgRed.open + ansiStyles.black.open,
close: ansiStyles.black.close + ansiStyles.bgRed.close
close: ansiStyles.black.close + ansiStyles.bgRed.close,
},
equal: ansiStyles.blue,
insertLine: {
open: ansiStyles.green.open,
close: ansiStyles.green.close
close: ansiStyles.green.close,
},
deleteLine: {
open: ansiStyles.red.open,
close: ansiStyles.red.close
}
}
close: ansiStyles.red.close,
},
},
},
symbol: ansiStyles.yellow,
typedArray: {
bytes: ansiStyles.yellow
bytes: ansiStyles.yellow,
},
undefined: ansiStyles.yellow
undefined: ansiStyles.yellow,
};
const plainTheme = cloneDeepWith(colorTheme, value => {
if (typeof value === 'string') {
return stripAnsi(value);
}
});
const plainTheme = JSON.parse(JSON.stringify(colorTheme), value => typeof value === 'string' ? stripAnsi(value) : value);
const theme = chalk.level > 0 ? colorTheme : plainTheme;
exports.default = {
const concordanceOptions = {
// Use Node's object inspection depth, clamped to a minimum of 3
get maxDepth() {
return Math.max(3, util.inspect.defaultOptions.depth);
return Math.max(3, inspect.defaultOptions.depth);
},
plugins,
theme
theme,
};
exports.snapshotManager = {plugins, theme: plainTheme};
export default concordanceOptions;
export const snapshotManager = {theme: plainTheme};

node_modules/ava/lib/context-ref.js generated vendored
View file

@ -1,7 +1,4 @@
'use strict';
const clone = require('lodash/clone');
class ContextRef {
export default class ContextRef {
constructor() {
this.value = {};
}
@ -18,7 +15,6 @@ class ContextRef {
return new LateBinding(this);
}
}
module.exports = ContextRef;
class LateBinding extends ContextRef {
constructor(ref) {
@ -29,7 +25,8 @@ class LateBinding extends ContextRef {
get() {
if (!this.bound) {
this.set(clone(this.ref.get()));
const value = this.ref.get();
this.set(value !== null && typeof value === 'object' ? {...value} : value);
}
return super.get();

52
node_modules/ava/lib/create-chain.js generated vendored
View file

@ -1,4 +1,3 @@
'use strict';
const chainRegistry = new WeakMap();
function startChain(name, call, defaults) {
@ -48,20 +47,16 @@ function createHookChain(hook, isAfterHook) {
// * `skip` must come at the end
// * no `only`
// * no repeating
extendChain(hook, 'cb', 'callback');
extendChain(hook, 'skip', 'skipped');
extendChain(hook.cb, 'skip', 'skipped');
if (isAfterHook) {
extendChain(hook, 'always');
extendChain(hook.always, 'cb', 'callback');
extendChain(hook.always, 'skip', 'skipped');
extendChain(hook.always.cb, 'skip', 'skipped');
}
return hook;
}
function createChain(fn, defaults, meta) {
export default function createChain(fn, defaults, meta) {
// Test chaining rules:
// * `serial` must come at the start
// * `only` and `skip` must come at the end
@ -69,27 +64,15 @@ function createChain(fn, defaults, meta) {
// * `only` and `skip` cannot be chained together
// * no repeating
const root = startChain('test', fn, {...defaults, type: 'test'});
extendChain(root, 'cb', 'callback');
extendChain(root, 'failing');
extendChain(root, 'only', 'exclusive');
extendChain(root, 'serial');
extendChain(root, 'skip', 'skipped');
extendChain(root.cb, 'failing');
extendChain(root.cb, 'only', 'exclusive');
extendChain(root.cb, 'skip', 'skipped');
extendChain(root.cb.failing, 'only', 'exclusive');
extendChain(root.cb.failing, 'skip', 'skipped');
extendChain(root.failing, 'only', 'exclusive');
extendChain(root.failing, 'skip', 'skipped');
extendChain(root.serial, 'cb', 'callback');
extendChain(root.serial, 'failing');
extendChain(root.serial, 'only', 'exclusive');
extendChain(root.serial, 'skip', 'skipped');
extendChain(root.serial.cb, 'failing');
extendChain(root.serial.cb, 'only', 'exclusive');
extendChain(root.serial.cb, 'skip', 'skipped');
extendChain(root.serial.cb.failing, 'only', 'exclusive');
extendChain(root.serial.cb.failing, 'skip', 'skipped');
extendChain(root.serial.failing, 'only', 'exclusive');
extendChain(root.serial.failing, 'skip', 'skipped');
@ -108,9 +91,38 @@ function createChain(fn, defaults, meta) {
root.todo = startChain('test.todo', fn, {...defaults, type: 'test', todo: true});
root.serial.todo = startChain('test.serial.todo', fn, {...defaults, serial: true, type: 'test', todo: true});
root.macro = options => {
if (typeof options === 'function') {
return Object.freeze({exec: options});
}
return Object.freeze({exec: options.exec, title: options.title});
};
root.meta = meta;
// Our type definition uses ESM syntax; when using CJS with VSCode, the
// auto-completion assumes the root is accessed through `require('ava').default`.
// Placate VSCode by adding a mostly hidden default property on the root.
// This is available through both CJS and ESM imports. We use a proxy so that
// we don't end up with root.default.default.default chains.
Object.defineProperty(root, 'default', {
configurable: false,
enumerable: false,
writable: false,
value: new Proxy(root, {
apply(target, thisArg, argumentsList) {
target.apply(thisArg, argumentsList);
},
get(target, prop) {
if (prop === 'default') {
throw new TypeError('Cannot access default.default');
}
return target[prop];
},
}),
});
return root;
}
module.exports = createChain;
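A brief sketch of what this chain enables for consumers (editorial illustration, not part of the vendored file): `test.macro()` accepts either a bare function or an `{exec, title}` pair, and the hidden `default` property lets `require('ava').default` behave like `require('ava')` without nesting further `.default` chains.
import test from 'ava';
const hasLength = test.macro({
  exec: (t, input, expected) => t.is(input.length, expected),
  title: (providedTitle = '', input) => `${providedTitle} length of "${input}"`.trim(),
});
test(hasLength, 'ava', 3);
test('sanity:', hasLength, 'unicorn', 7);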

node_modules/ava/lib/environment-variables.js generated vendored
View file

@ -1,5 +1,4 @@
'use strict';
function validateEnvironmentVariables(environmentVariables) {
export default function validateEnvironmentVariables(environmentVariables) {
if (!environmentVariables) {
return {};
}
@ -12,5 +11,3 @@ function validateEnvironmentVariables(environmentVariables) {
return environmentVariables;
}
module.exports = validateEnvironmentVariables;

73
node_modules/ava/lib/eslint-plugin-helper-worker.js generated vendored Normal file
View file

@ -0,0 +1,73 @@
import v8 from 'node:v8';
import {parentPort, workerData} from 'node:worker_threads';
import normalizeExtensions from './extensions.js';
import {normalizeGlobs} from './globs.js';
import {loadConfig} from './load-config.js';
import providerManager from './provider-manager.js';
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
const configCache = new Map();
const collectProviders = async ({conf, projectDir}) => {
const providers = [];
if (Reflect.has(conf, 'typescript')) {
const {level, main} = await providerManager.typescript(projectDir);
providers.push({
level,
main: main({config: conf.typescript}),
type: 'typescript',
});
}
return providers;
};
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
const extensions = overrideExtensions
? normalizeExtensions(overrideExtensions)
: normalizeExtensions(conf.extensions, providers);
return {
cwd: projectDir,
...normalizeGlobs({
extensions,
files: overrideFiles ? overrideFiles : conf.files,
providers,
}),
};
};
const resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
if (!configCache.has(projectDir)) {
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(async conf => {
const providers = await collectProviders({conf, projectDir});
return {conf, providers};
}));
}
const {conf, providers} = await configCache.get(projectDir);
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
};
const data = new Uint8Array(workerData.dataBuffer);
const sync = new Int32Array(workerData.syncBuffer);
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
let encoded;
try {
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
encoded = v8.serialize(globs);
} catch (error) {
encoded = v8.serialize(error);
}
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
Atomics.store(sync, 0, byteLength);
Atomics.notify(sync, 0);
};
parentPort.on('message', handleMessage);
handleMessage(workerData.firstMessage);
delete workerData.firstMessage;
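
For context, a sketch of how a caller on the main thread could hand this worker its shared buffers and read the serialized globs back. The buffer size mirrors the constant above, but the actual consumer inside AVA's ESLint helper is not shown in this diff, so treat the worker path and wiring as assumptions:

'use strict';
const path = require('path');
const v8 = require('v8');
const {Worker} = require('worker_threads');

const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Must match the worker above.
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
const syncBuffer = new SharedArrayBuffer(4);
const data = new Uint8Array(dataBuffer);
const sync = new Int32Array(syncBuffer);

// Assumes this file sits next to the worker in ava/lib.
const worker = new Worker(path.join(__dirname, 'eslint-plugin-helper-worker.js'), {
  workerData: {
    dataBuffer,
    syncBuffer,
    firstMessage: {projectDir: process.cwd()},
  },
});
worker.unref();

// Block until the worker stores the payload length and notifies, then decode it.
Atomics.wait(sync, 0, 0);
const byteLength = Atomics.load(sync, 0);
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
  throw new Error('Globs were too large for the shared buffer');
}
const globsOrError = v8.deserialize(data.slice(0, byteLength));
console.log(globsOrError);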

4
node_modules/ava/lib/extensions.js generated vendored
View file

@ -1,4 +1,4 @@
module.exports = (configuredExtensions, providers = []) => {
export default function resolveExtensions(configuredExtensions, providers = []) {
// Combine all extensions possible for testing. Remove duplicate extensions.
const duplicates = new Set();
const seen = new Set();
@ -43,4 +43,4 @@ module.exports = (configuredExtensions, providers = []) => {
}
return [...seen];
};
}

163
node_modules/ava/lib/fork.js generated vendored
View file

@ -1,68 +1,71 @@
'use strict';
const childProcess = require('child_process');
const path = require('path');
const fs = require('fs');
const Emittery = require('emittery');
const {controlFlow} = require('./ipc-flow-control');
import childProcess from 'node:child_process';
import process from 'node:process';
import {fileURLToPath} from 'node:url';
import {Worker} from 'node:worker_threads';
if (fs.realpathSync(__filename) !== __filename) {
console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
import Emittery from 'emittery';
import {pEvent} from 'p-event';
import {controlFlow} from './ipc-flow-control.cjs';
import serializeError from './serialize-error.js';
let workerPath = new URL('worker/base.js', import.meta.url);
export function _testOnlyReplaceWorkerPath(replacement) {
workerPath = replacement;
}
// In case the test file imports a different AVA install,
// the presence of this variable allows it to require this one instead
const AVA_PATH = path.resolve(__dirname, '..');
const WORKER_PATH = require.resolve('./worker/subprocess');
const additionalExecArgv = ['--enable-source-maps'];
class SharedWorkerChannel extends Emittery {
constructor({channelId, filename, initialData}, sendToFork) {
super();
this.id = channelId;
this.filename = filename;
this.initialData = initialData;
this.sendToFork = sendToFork;
}
signalReady() {
this.sendToFork({
type: 'shared-worker-ready',
channelId: this.id
const createWorker = (options, execArgv) => {
let worker;
let postMessage;
let close;
if (options.workerThreads) {
worker = new Worker(workerPath, {
argv: options.workerArgv,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
execArgv: [...execArgv, ...additionalExecArgv],
workerData: {
options,
},
trackUnmanagedFds: true,
stdin: true,
stdout: true,
stderr: true,
});
}
postMessage = worker.postMessage.bind(worker);
signalError() {
this.sendToFork({
type: 'shared-worker-error',
channelId: this.id
// Ensure we've seen this event before we terminate the worker thread, as a
// workaround for https://github.com/nodejs/node/issues/38418.
const starting = pEvent(worker, 'message', ({ava}) => ava && ava.type === 'starting');
close = async () => {
try {
await starting;
await worker.terminate();
} finally {
// No-op
}
};
} else {
worker = childProcess.fork(fileURLToPath(workerPath), options.workerArgv, {
cwd: options.projectDir,
silent: true,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
execArgv: [...execArgv, ...additionalExecArgv],
});
postMessage = controlFlow(worker);
close = async () => worker.kill();
}
emitMessage({messageId, replyTo, serializedData}) {
this.emit('message', {
messageId,
replyTo,
serializedData
});
}
forwardMessageToFork({messageId, replyTo, serializedData}) {
this.sendToFork({
type: 'shared-worker-message',
channelId: this.id,
messageId,
replyTo,
serializedData
});
}
}
let forkCounter = 0;
module.exports = (file, options, execArgv = process.execArgv) => {
const forkId = `fork/${++forkCounter}`;
const sharedWorkerChannels = new Map();
return {
worker,
postMessage,
close,
};
};
export default function loadFork(file, options, execArgv = process.execArgv) {
let finished = false;
const emitter = new Emittery();
@ -75,31 +78,22 @@ module.exports = (file, options, execArgv = process.execArgv) => {
options = {
baseDir: process.cwd(),
file,
forkId,
...options
...options,
};
const subprocess = childProcess.fork(WORKER_PATH, options.workerArgv, {
cwd: options.projectDir,
silent: true,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
execArgv
});
subprocess.stdout.on('data', chunk => {
const {worker, postMessage, close} = createWorker(options, execArgv);
worker.stdout.on('data', chunk => {
emitStateChange({type: 'worker-stdout', chunk});
});
subprocess.stderr.on('data', chunk => {
worker.stderr.on('data', chunk => {
emitStateChange({type: 'worker-stderr', chunk});
});
const bufferedSend = controlFlow(subprocess);
let forcedExit = false;
const send = evt => {
if (!finished && !forcedExit) {
bufferedSend({ava: evt});
postMessage({ava: evt});
}
};
@ -109,7 +103,7 @@ module.exports = (file, options, execArgv = process.execArgv) => {
resolve();
};
subprocess.on('message', message => {
worker.on('message', message => {
if (!message.ava) {
return;
}
@ -119,15 +113,18 @@ module.exports = (file, options, execArgv = process.execArgv) => {
send({type: 'options', options});
break;
case 'shared-worker-connect': {
const channel = new SharedWorkerChannel(message.ava, send);
sharedWorkerChannels.set(channel.id, channel);
emitter.emit('connectSharedWorker', channel);
const {channelId, filename, initialData, port} = message.ava;
emitter.emit('connectSharedWorker', {
filename,
initialData,
port,
signalError() {
send({type: 'shared-worker-error', channelId});
},
});
break;
}
case 'shared-worker-message':
sharedWorkerChannels.get(message.ava.channelId).emitMessage(message.ava);
break;
case 'ping':
send({type: 'pong'});
break;
@ -136,12 +133,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
}
});
subprocess.on('error', err => {
emitStateChange({type: 'worker-failed', err});
worker.on('error', error => {
emitStateChange({type: 'worker-failed', err: serializeError('Worker error', false, error, file)});
finish();
});
subprocess.on('exit', (code, signal) => {
worker.on('exit', (code, signal) => {
if (forcedExit) {
emitStateChange({type: 'worker-finished', forcedExit});
} else if (code > 0) {
@ -158,12 +155,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
return {
file,
forkId,
threadId: worker.threadId,
promise,
exit() {
forcedExit = true;
subprocess.kill();
close();
},
notifyOfPeerFailure() {
@ -176,6 +173,6 @@ module.exports = (file, options, execArgv = process.execArgv) => {
onStateChange(listener) {
return emitter.on('stateChange', listener);
}
},
};
};
}
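
A hypothetical sketch of driving the rewritten default export of lib/fork.js. The option names are taken from the code above, but the file path and values are made up, and a real run needs the full options object that AVA's API layer normally supplies:

import loadFork from './fork.js'; // Assumes we are inside ava/lib.

const fork = loadFork('/project/test/login.test.js', {
  projectDir: '/project',
  workerThreads: true, // Run in a worker_threads Worker instead of a child process.
  workerArgv: [],
  environmentVariables: {},
});

fork.onStateChange(evt => {
  if (evt.type === 'worker-stdout' || evt.type === 'worker-stderr') {
    process.stdout.write(evt.chunk);
  }
});

await fork.promise; // Settles once the worker has finished or failed.
console.log('ran in thread', fork.threadId);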

140
node_modules/ava/lib/glob-helpers.cjs generated vendored Normal file
View file

@ -0,0 +1,140 @@
'use strict';
const path = require('path');
const process = require('process');
const ignoreByDefault = require('ignore-by-default');
const picomatch = require('picomatch');
const slash = require('slash');
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
exports.defaultIgnorePatterns = defaultIgnorePatterns;
const defaultPicomatchIgnorePatterns = [
...defaultIgnorePatterns,
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
];
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
const matchingCache = new WeakMap();
const processMatchingPatterns = input => {
let result = matchingCache.get(input);
if (!result) {
const ignore = [...defaultPicomatchIgnorePatterns];
const patterns = input.filter(pattern => {
if (pattern.startsWith('!')) {
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
return false;
}
return true;
});
result = {
match: picomatch(patterns, {ignore}),
matchNoIgnore: picomatch(patterns),
individualMatchers: patterns.map(pattern => ({pattern, match: picomatch(pattern, {ignore})})),
};
matchingCache.set(input, result);
}
return result;
};
exports.processMatchingPatterns = processMatchingPatterns;
const matchesIgnorePatterns = (file, patterns) => {
const {matchNoIgnore} = processMatchingPatterns(patterns);
return matchNoIgnore(file) || defaultMatchNoIgnore(file);
};
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
file = normalizeFileForMatching(cwd, file);
return {
isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns),
};
}
exports.classify = classify;
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
exports.hasExtension = hasExtension;
function isHelperish(file) { // Assume file has been normalized already.
// File names starting with an underscore are deemed "helpers".
if (path.basename(file).startsWith('_')) {
return true;
}
// This function assumes the file has been normalized. If it couldn't be,
// don't check if it's got a parent directory that starts with an underscore.
// Deem it not a "helper".
if (path.isAbsolute(file)) {
return false;
}
// If the file has a parent directory that starts with only a single
// underscore, it's deemed a "helper".
return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
}
exports.isHelperish = isHelperish;
function matches(file, patterns) {
const {match} = processMatchingPatterns(patterns);
return match(file);
}
exports.matches = matches;
function normalizeFileForMatching(cwd, file) {
if (process.platform === 'win32') {
cwd = slash(cwd);
file = slash(file);
}
// Note that if `file` is outside `cwd` we can't normalize it. If this turns
// out to be a real-world scenario we may have to make changes in calling code
// to make sure the file isn't even selected for matching.
if (!file.startsWith(cwd)) {
return file;
}
// Assume `cwd` does *not* end in a slash.
return file.slice(cwd.length + 1);
}
exports.normalizeFileForMatching = normalizeFileForMatching;
function normalizePattern(pattern) {
// Always use `/` in patterns, harmonizing matching across platforms
if (process.platform === 'win32') {
pattern = slash(pattern);
}
if (pattern.endsWith('/')) {
pattern = pattern.slice(0, -1);
}
if (pattern.startsWith('./')) {
return pattern.slice(2);
}
if (pattern.startsWith('!./')) {
return `!${pattern.slice(3)}`;
}
return pattern;
}
exports.normalizePattern = normalizePattern;
function normalizePatterns(patterns) {
return patterns.map(pattern => normalizePattern(pattern));
}
exports.normalizePatterns = normalizePatterns;
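
A small sketch of the CommonJS helpers above in use; the paths and patterns are illustrative:

'use strict';
const {classify, normalizePatterns} = require('./glob-helpers.cjs'); // Assumes we are inside ava/lib.

const {isTest, isIgnoredByWatcher} = classify('/project/test/login.test.js', {
  cwd: '/project',
  extensions: ['js', 'cjs', 'mjs'],
  filePatterns: normalizePatterns(['test/**/*.test.js']),
  ignoredByWatcherPatterns: normalizePatterns(['coverage/']),
});

console.log(isTest, isIgnoredByWatcher); // true false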

305
node_modules/ava/lib/globs.js generated vendored
View file

@ -1,54 +1,36 @@
'use strict';
const path = require('path');
const globby = require('globby');
const ignoreByDefault = require('ignore-by-default');
const picomatch = require('picomatch');
const slash = require('slash');
const providerManager = require('./provider-manager');
import fs from 'node:fs';
import path from 'node:path';
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
const defaultPicomatchIgnorePatterns = [
...defaultIgnorePatterns,
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`)
];
import {globby, globbySync} from 'globby';
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
import {
defaultIgnorePatterns,
hasExtension,
normalizeFileForMatching,
normalizePatterns,
processMatchingPatterns,
} from './glob-helpers.cjs';
export {
classify,
isHelperish,
matches,
normalizePattern,
defaultIgnorePatterns,
hasExtension,
normalizeFileForMatching,
normalizePatterns,
} from './glob-helpers.cjs';
const defaultIgnoredByWatcherPatterns = [
'**/*.snap.md', // No need to rerun tests when the Markdown files change.
'ava.config.js', // Config is not reloaded so avoid rerunning tests when it changes.
'ava.config.cjs' // Config is not reloaded so avoid rerunning tests when it changes.
'ava.config.cjs', // Config is not reloaded so avoid rerunning tests when it changes.
];
const buildExtensionPattern = extensions => extensions.length === 1 ? extensions[0] : `{${extensions.join(',')}}`;
function normalizePattern(pattern) {
// Always use `/` in patterns, harmonizing matching across platforms
if (process.platform === 'win32') {
pattern = slash(pattern);
}
if (pattern.startsWith('./')) {
return pattern.slice(2);
}
if (pattern.startsWith('!./')) {
return `!${pattern.slice(3)}`;
}
return pattern;
}
exports.normalizePattern = normalizePattern;
function normalizePatterns(patterns) {
return patterns.map(pattern => normalizePattern(pattern));
}
exports.normalizePatterns = normalizePatterns;
function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
export function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
if (filePatterns !== undefined && (!Array.isArray(filePatterns) || filePatterns.length === 0)) {
throw new Error('The files configuration must be an array containing glob patterns.');
}
@ -68,7 +50,7 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
`**/test/**/*.${extensionPattern}`,
`**/tests/**/*.${extensionPattern}`,
'!**/__tests__/**/__{helper,fixture}?(s)__/**/*',
'!**/test?(s)/**/{helper,fixture}?(s)/**/*'
'!**/test?(s)/**/{helper,fixture}?(s)/**/*',
];
if (filePatterns) {
@ -84,40 +66,36 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
ignoredByWatcherPatterns = ignoredByWatcherPatterns ? [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)] : [...defaultIgnoredByWatcherPatterns];
for (const {level, main} of providers) {
if (level >= providerManager.levels.pathRewrites) {
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
}
for (const {main} of providers) {
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
}
return {extensions, filePatterns, ignoredByWatcherPatterns};
}
exports.normalizeGlobs = normalizeGlobs;
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
exports.hasExtension = hasExtension;
const globOptions = {
// Globs should work relative to the cwd value only (this should be the
// project directory that AVA is run in).
absolute: false,
braceExpansion: true,
caseSensitiveMatch: false,
dot: false,
expandDirectories: false,
extglob: true,
followSymbolicLinks: true,
gitignore: false,
globstar: true,
ignore: defaultIgnorePatterns,
baseNameMatch: false,
stats: false,
unique: true,
};
const globFiles = async (cwd, patterns) => {
const files = await globby(patterns, {
// Globs should work relative to the cwd value only (this should be the
// project directory that AVA is run in).
absolute: false,
braceExpansion: true,
caseSensitiveMatch: false,
...globOptions,
cwd,
dot: false,
expandDirectories: false,
extglob: true,
followSymbolicLinks: true,
gitignore: false,
globstar: true,
ignore: defaultIgnorePatterns,
baseNameMatch: false,
onlyFiles: true,
stats: false,
unique: true
});
// Return absolute file paths. This has the side-effect of normalizing paths
@ -125,119 +103,114 @@ const globFiles = async (cwd, patterns) => {
return files.map(file => path.join(cwd, file));
};
async function findFiles({cwd, extensions, filePatterns}) {
return (await globFiles(cwd, filePatterns)).filter(file => hasExtension(extensions, file));
const globDirectoriesSync = (cwd, patterns) => {
const files = globbySync(patterns, {
...globOptions,
cwd,
onlyDirectories: true,
});
// Return absolute file paths. This has the side-effect of normalizing paths
// on Windows.
return files.map(file => path.join(cwd, file));
};
export async function findFiles({cwd, extensions, filePatterns}) {
const files = await globFiles(cwd, filePatterns);
return files.filter(file => hasExtension(extensions, file));
}
exports.findFiles = findFiles;
async function findTests({cwd, extensions, filePatterns}) {
return (await findFiles({cwd, extensions, filePatterns})).filter(file => !path.basename(file).startsWith('_'));
export async function findTests({cwd, extensions, filePatterns}) {
const files = await findFiles({cwd, extensions, filePatterns});
return files.filter(file => !path.basename(file).startsWith('_'));
}
exports.findTests = findTests;
function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
export function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
return [
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!'))
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!')),
];
}
exports.getChokidarIgnorePatterns = getChokidarIgnorePatterns;
export function applyTestFileFilter({ // eslint-disable-line complexity
cwd,
expandDirectories = true,
filter,
providers = [],
testFiles,
treatFilterPatternsAsFiles = true,
}) {
const {individualMatchers} = processMatchingPatterns(filter);
const normalizedFiles = testFiles.map(file => ({file, matcheable: normalizeFileForMatching(cwd, file)}));
const matchingCache = new WeakMap();
const processMatchingPatterns = input => {
let result = matchingCache.get(input);
if (!result) {
const ignore = [...defaultPicomatchIgnorePatterns];
const patterns = input.filter(pattern => {
if (pattern.startsWith('!')) {
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
return false;
const selected = new Set();
const unmatchedPatterns = new Set(individualMatchers.map(({pattern}) => pattern));
for (const {pattern, match} of individualMatchers) {
for (const {file, matcheable} of normalizedFiles) {
if (match(matcheable)) {
unmatchedPatterns.delete(pattern);
selected.add(file);
}
}
}
if (expandDirectories && unmatchedPatterns.size > 0) {
const expansion = [];
for (const pattern of unmatchedPatterns) {
const directories = globDirectoriesSync(cwd, pattern);
if (directories.length > 0) {
unmatchedPatterns.delete(pattern);
expansion.push(directories);
}
}
const directories = expansion.flat();
if (directories.length > 0) {
for (const file of testFiles) {
if (selected.has(file)) {
continue;
}
for (const dir of directories) {
if (file.startsWith(dir + path.sep)) { // eslint-disable-line max-depth
selected.add(file);
}
}
}
}
}
const ignoredFilterPatternFiles = [];
if (treatFilterPatternsAsFiles && unmatchedPatterns.size > 0) {
const providerExtensions = new Set(providers.flatMap(({main}) => main.extensions));
for (const pattern of unmatchedPatterns) {
const file = path.join(cwd, pattern);
try {
const stats = fs.statSync(file);
if (!stats.isFile()) {
continue;
}
} catch (error) {
if (error.code === 'ENOENT') {
continue;
}
throw error;
}
return true;
});
if (
path.basename(file).startsWith('_')
|| providerExtensions.has(path.extname(file).slice(1))
|| file.split(path.sep).includes('node_modules')
) {
ignoredFilterPatternFiles.push(pattern);
continue;
}
result = {
match: picomatch(patterns, {ignore}),
matchNoIgnore: picomatch(patterns)
};
matchingCache.set(input, result);
selected.add(file);
}
}
return result;
};
function matches(file, patterns) {
const {match} = processMatchingPatterns(patterns);
return match(file);
return Object.assign([...selected], {ignoredFilterPatternFiles});
}
exports.matches = matches;
const matchesIgnorePatterns = (file, patterns) => {
const {matchNoIgnore} = processMatchingPatterns(patterns);
return matchNoIgnore(file) || defaultMatchNoIgnore(file);
};
function normalizeFileForMatching(cwd, file) {
if (process.platform === 'win32') {
cwd = slash(cwd);
file = slash(file);
}
if (!cwd) { // TODO: Ensure tests provide an actual value.
return file;
}
// TODO: If `file` is outside `cwd` we can't normalize it. Need to figure
// out if that's a real-world scenario, but we may have to ensure the file
// isn't even selected.
if (!file.startsWith(cwd)) {
return file;
}
// Assume `cwd` does *not* end in a slash.
return file.slice(cwd.length + 1);
}
exports.normalizeFileForMatching = normalizeFileForMatching;
function isHelperish(file) { // Assume file has been normalized already.
// File names starting with an underscore are deemed "helpers".
if (path.basename(file).startsWith('_')) {
return true;
}
// This function assumes the file has been normalized. If it couldn't be,
// don't check if it's got a parent directory that starts with an underscore.
// Deem it not a "helper".
if (path.isAbsolute(file)) {
return false;
}
// If the file has a parent directory that starts with only a single
// underscore, it's deemed a "helper".
return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
}
exports.isHelperish = isHelperish;
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
file = normalizeFileForMatching(cwd, file);
return {
isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns)
};
}
exports.classify = classify;
function applyTestFileFilter({cwd, filter, testFiles}) {
return testFiles.filter(file => matches(normalizeFileForMatching(cwd, file), filter));
}
exports.applyTestFileFilter = applyTestFileFilter;
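
A sketch of the reworked applyTestFileFilter in isolation. The paths are illustrative; in AVA it is fed the CLI's positional filter patterns:

import {applyTestFileFilter} from './globs.js'; // Assumes we are inside ava/lib.

const testFiles = [
  '/project/test/login.test.js',
  '/project/test/signup.test.js',
];

const selected = applyTestFileFilter({
  cwd: '/project',
  filter: ['test/login*'],
  testFiles,
  expandDirectories: false, // Skip the directory-expansion fallback for unmatched patterns.
  treatFilterPatternsAsFiles: false, // Skip the literal-file fallback as well.
});

console.log([...selected]); // ['/project/test/login.test.js']
console.log(selected.ignoredFilterPatternFiles); // []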

View file

@ -1,3 +1,4 @@
'use strict';
function controlFlow(channel) {
let errored = false;
let deliverImmediately = true;

6
node_modules/ava/lib/is-ci.js generated vendored
View file

@ -1,5 +1,7 @@
const info = require('ci-info');
import process from 'node:process';
import info from 'ci-info';
const {AVA_FORCE_CI} = process.env;
module.exports = AVA_FORCE_CI === 'not-ci' ? false : AVA_FORCE_CI === 'ci' || info.isCI;
export default AVA_FORCE_CI === 'not-ci' ? false : AVA_FORCE_CI === 'ci' || info.isCI;

View file

@ -1,17 +1,13 @@
'use strict';
function isLikeSelector(selector) {
return selector !== null &&
typeof selector === 'object' &&
Reflect.getPrototypeOf(selector) === Object.prototype &&
Reflect.ownKeys(selector).length > 0;
export function isLikeSelector(selector) {
return selector !== null
&& typeof selector === 'object'
&& Reflect.getPrototypeOf(selector) === Object.prototype
&& Reflect.ownKeys(selector).length > 0;
}
exports.isLikeSelector = isLikeSelector;
export const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
exports.CIRCULAR_SELECTOR = CIRCULAR_SELECTOR;
function selectComparable(lhs, selector, circular = new Set()) {
export function selectComparable(lhs, selector, circular = new Set()) {
if (circular.has(selector)) {
throw CIRCULAR_SELECTOR;
}
@ -33,5 +29,3 @@ function selectComparable(lhs, selector, circular = new Set()) {
return comparable;
}
exports.selectComparable = selectComparable;
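
These helpers back t.like(). A sketch of what they do with a selector; the expected output is inferred from how t.like() compares values, so treat it as illustrative:

import {isLikeSelector, selectComparable} from './like-selector.js'; // Assumes we are inside ava/lib.

const selector = {name: 'ava', config: {failFast: true}};
console.log(isLikeSelector(selector)); // true

// Reduce the actual value to just the keys named by the selector before comparing.
const comparable = selectComparable(
  {name: 'ava', version: '4.0.0', config: {failFast: true, tap: false}},
  selector,
);
console.log(comparable); // {name: 'ava', config: {failFast: true}}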

29
node_modules/ava/lib/line-numbers.js generated vendored
View file

@ -1,7 +1,4 @@
'use strict';
const picomatch = require('picomatch');
const flatten = require('lodash/flatten');
import picomatch from 'picomatch';
const NUMBER_REGEX = /^\d+$/;
const RANGE_REGEX = /^(?<startGroup>\d+)-(?<endGroup>\d+)$/;
@ -17,10 +14,10 @@ const sortNumbersAscending = array => {
const parseNumber = string => Number.parseInt(string, 10);
const removeAllWhitespace = string => string.replace(/\s/g, '');
const range = (start, end) => new Array(end - start + 1).fill(start).map((element, index) => element + index);
const range = (start, end) => Array.from({length: end - start + 1}).fill(start).map((element, index) => element + index);
const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(flatten(
suffix.split(',').map(part => {
const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(
suffix.split(',').flatMap(part => {
if (NUMBER_REGEX.test(part)) {
return parseNumber(part);
}
@ -34,10 +31,10 @@ const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(flatten(
}
return range(start, end);
})
)));
}),
));
function splitPatternAndLineNumbers(pattern) {
export function splitPatternAndLineNumbers(pattern) {
const parts = pattern.split(DELIMITER);
if (parts.length === 1) {
return {pattern, lineNumbers: null};
@ -51,14 +48,10 @@ function splitPatternAndLineNumbers(pattern) {
return {pattern: parts.join(DELIMITER), lineNumbers: parseLineNumbers(suffix)};
}
exports.splitPatternAndLineNumbers = splitPatternAndLineNumbers;
function getApplicableLineNumbers(normalizedFilePath, filter) {
return sortNumbersAscending(distinctArray(flatten(
export function getApplicableLineNumbers(normalizedFilePath, filter) {
return sortNumbersAscending(distinctArray(
filter
.filter(({pattern, lineNumbers}) => lineNumbers && picomatch.isMatch(normalizedFilePath, pattern))
.map(({lineNumbers}) => lineNumbers)
)));
.flatMap(({lineNumbers}) => lineNumbers),
));
}
exports.getApplicableLineNumbers = getApplicableLineNumbers;
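
A sketch of the flattened line-number parsing. The ':' separator is the DELIMITER constant, which is elided from this hunk, so it is assumed here:

import {splitPatternAndLineNumbers, getApplicableLineNumbers} from './line-numbers.js'; // Assumes we are inside ava/lib.

// 'test/login.test.js:3,7-9' becomes a glob pattern plus sorted, de-duplicated line numbers.
const entry = splitPatternAndLineNumbers('test/login.test.js:3,7-9');
console.log(entry); // {pattern: 'test/login.test.js', lineNumbers: [3, 7, 8, 9]}

// Collect the numbers from every filter entry whose pattern matches the file.
console.log(getApplicableLineNumbers('test/login.test.js', [entry])); // [3, 7, 8, 9]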

254
node_modules/ava/lib/load-config.js generated vendored
View file

@ -1,40 +1,17 @@
'use strict';
const fs = require('fs');
const path = require('path');
const url = require('url');
const vm = require('vm');
const {isPlainObject} = require('is-plain-object');
const pkgConf = require('pkg-conf');
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import url from 'node:url';
import {isPlainObject} from 'is-plain-object';
import {packageConfig, packageJsonPath} from 'pkg-conf';
const NO_SUCH_FILE = Symbol('no ava.config.js file');
const MISSING_DEFAULT_EXPORT = Symbol('missing default export');
const EXPERIMENTS = new Set([
'configurableModuleFormat',
'disableNullExpectations',
'disableSnapshotsInHooks',
'nextGenConfig',
'reverseTeardowns',
'sharedWorkers'
]);
// *Very* rudimentary support for loading ava.config.js files containing an `export default` statement.
const evaluateJsConfig = (contents, configFile) => {
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.toString('utf8').replace(/export default/g, '__export__ =')};return __export__;})()`, {
filename: configFile,
lineOffset: -1
});
return script.runInThisContext();
};
const EXPERIMENTS = new Set();
const importConfig = async ({configFile, fileForErrorMessage}) => {
let module;
try {
module = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
} catch (error) {
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
}
const {default: config = MISSING_DEFAULT_EXPORT} = module;
const {default: config = MISSING_DEFAULT_EXPORT} = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
if (config === MISSING_DEFAULT_EXPORT) {
throw new Error(`${fileForErrorMessage} must have a default export`);
}
@ -42,79 +19,22 @@ const importConfig = async ({configFile, fileForErrorMessage}) => {
return config;
};
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}, useImport = false) => {
if (!configFile.endsWith('.js')) {
const loadConfigFile = async ({projectDir, configFile}) => {
if (!fs.existsSync(configFile)) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
let config;
try {
const contents = fs.readFileSync(configFile);
config = useImport && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig') ?
importConfig({configFile, fileForErrorMessage}) :
evaluateJsConfig(contents, configFile) || MISSING_DEFAULT_EXPORT;
return {config: await importConfig({configFile, fileForErrorMessage}), configFile, fileForErrorMessage};
} catch (error) {
if (error.code === 'ENOENT') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
}
if (config === MISSING_DEFAULT_EXPORT) {
throw new Error(`${fileForErrorMessage} must have a default export, using ES module syntax`);
}
return {config, fileForErrorMessage};
};
const loadCjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.cjs')}) => {
if (!configFile.endsWith('.cjs')) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
try {
return {config: require(configFile), fileForErrorMessage};
} catch (error) {
if (error.code === 'MODULE_NOT_FOUND') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
}
};
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}, experimentally = false) => {
if (!configFile.endsWith('.mjs')) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
try {
const contents = fs.readFileSync(configFile);
if (experimentally && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig')) {
return {config: importConfig({configFile, fileForErrorMessage}), fileForErrorMessage};
}
} catch (error) {
if (error.code === 'ENOENT') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
}
throw new Error(`AVA cannot yet load ${fileForErrorMessage} files`);
};
function resolveConfigFile(projectDir, configFile) {
function resolveConfigFile(configFile) {
if (configFile) {
configFile = path.resolve(configFile); // Relative to CWD
if (path.basename(configFile) !== path.relative(projectDir, configFile)) {
throw new Error('Config files must be located next to the package.json file');
}
if (!configFile.endsWith('.js') && !configFile.endsWith('.cjs') && !configFile.endsWith('.mjs')) {
throw new Error('Config files must have .js, .cjs or .mjs extensions');
@ -124,19 +44,59 @@ function resolveConfigFile(projectDir, configFile) {
return configFile;
}
function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
const filepath = pkgConf.filepath(packageConf);
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
const gitScmFile = process.env.AVA_FAKE_SCM_ROOT || '.git';
configFile = resolveConfigFile(projectDir, configFile);
async function findRepoRoot(fromDir) {
const {root} = path.parse(fromDir);
let dir = fromDir;
while (root !== dir) {
try {
const stat = await fs.promises.stat(path.join(dir, gitScmFile)); // eslint-disable-line no-await-in-loop
if (stat.isFile() || stat.isDirectory()) {
return dir;
}
} catch {}
dir = path.dirname(dir);
}
return root;
}
export async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = await packageConfig('ava', {cwd: resolveFrom});
const filepath = packageJsonPath(packageConf);
const projectDir = filepath === undefined ? resolveFrom : path.dirname(filepath);
const repoRoot = await findRepoRoot(projectDir);
// Conflicts are only allowed when an explicit config file is provided.
const allowConflictWithPackageJson = Boolean(configFile);
configFile = resolveConfigFile(configFile);
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
loadJsConfig({projectDir, configFile}),
loadCjsConfig({projectDir, configFile}),
loadMjsConfig({projectDir, configFile})
].filter(result => result !== null);
let fileConf = NO_SUCH_FILE;
let fileForErrorMessage;
let conflicting = [];
if (configFile) {
const loaded = await loadConfigFile({projectDir, configFile});
if (loaded !== null) {
({config: fileConf, fileForErrorMessage} = loaded);
}
} else {
let searchDir = projectDir;
const stopAt = path.dirname(repoRoot);
do {
const results = await Promise.all([ // eslint-disable-line no-await-in-loop
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.js')}),
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.cjs')}),
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.mjs')}),
]);
[{config: fileConf, fileForErrorMessage, configFile} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = results.filter(result => result !== null);
searchDir = path.dirname(searchDir);
} while (fileConf === NO_SUCH_FILE && searchDir !== stopAt);
}
if (conflicting.length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
@ -149,19 +109,12 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
}
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
throw new TypeError(`${fileForErrorMessage} must not export a promise`);
}
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
}
if (typeof fileConf === 'function') {
fileConf = fileConf({projectDir});
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must not return a promise`);
}
fileConf = await fileConf({projectDir});
if (!isPlainObject(fileConf)) {
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
@ -173,7 +126,7 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
}
}
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir, configFile};
const {nonSemVerExperiments: experiments} = config;
if (!isPlainObject(experiments)) {
@ -188,80 +141,3 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
return config;
}
exports.loadConfigSync = loadConfigSync;
async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = await pkgConf('ava', {cwd: resolveFrom});
const filepath = pkgConf.filepath(packageConf);
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
configFile = resolveConfigFile(projectDir, configFile);
const allowConflictWithPackageJson = Boolean(configFile);
// TODO: Refactor resolution logic to implement https://github.com/avajs/ava/issues/2285.
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
loadJsConfig({projectDir, configFile}, true),
loadCjsConfig({projectDir, configFile}),
loadMjsConfig({projectDir, configFile}, true)
].filter(result => result !== null);
if (conflicting.length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
}
let sawPromise = false;
if (fileConf !== NO_SUCH_FILE) {
if (allowConflictWithPackageJson) {
packageConf = {};
} else if (Object.keys(packageConf).length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
}
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
sawPromise = true;
fileConf = await fileConf;
}
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
}
if (typeof fileConf === 'function') {
fileConf = fileConf({projectDir});
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
sawPromise = true;
fileConf = await fileConf;
}
if (!isPlainObject(fileConf)) {
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
}
}
if ('ava' in fileConf) {
throw new Error(`Encountered ava property in ${fileForErrorMessage}; avoid wrapping the configuration`);
}
}
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
const {nonSemVerExperiments: experiments} = config;
if (!isPlainObject(experiments)) {
throw new Error(`nonSemVerExperiments from ${fileForErrorMessage} must be an object`);
}
for (const key of Object.keys(experiments)) {
if (!EXPERIMENTS.has(key)) {
throw new Error(`nonSemVerExperiments.${key} from ${fileForErrorMessage} is not a supported experiment`);
}
}
if (sawPromise && experiments.nextGenConfig !== true) {
throw new Error(`${fileForErrorMessage} exported a promise or an asynchronous factory function. You must enable the asyncConfigurationLoading experiment for this to work.`);
}
return config;
}
exports.loadConfig = loadConfig;
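
The worker shown earlier calls this loader with only a resolveFrom value; a standalone sketch follows, with the project path and defaults as illustrative assumptions:

import {loadConfig} from './load-config.js'; // Assumes we are inside ava/lib.

// Merges package.json "ava" config with an ava.config.js/.cjs/.mjs found between
// the project directory and the repository root, then applies the defaults.
const config = await loadConfig({
  resolveFrom: '/path/to/project',
  defaults: {files: ['test/**/*.test.js']},
});

console.log(config.projectDir, config.configFile, config.nonSemVerExperiments);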

10
node_modules/ava/lib/module-types.js generated vendored
View file

@ -54,12 +54,12 @@ const deriveFromArray = (extensions, defaultModuleType) => {
return moduleTypes;
};
module.exports = (configuredExtensions, defaultModuleType, experiments) => {
export default function moduleTypes(configuredExtensions, defaultModuleType) {
if (configuredExtensions === undefined) {
return {
cjs: 'commonjs',
mjs: 'module',
js: defaultModuleType
js: defaultModuleType,
};
}
@ -67,9 +67,5 @@ module.exports = (configuredExtensions, defaultModuleType, experiments) => {
return deriveFromArray(configuredExtensions, defaultModuleType);
}
if (!experiments.configurableModuleFormat) {
throw new Error('You must enable the `configurableModuleFormat` experiment in order to specify module types');
}
return deriveFromObject(configuredExtensions, defaultModuleType);
};
}

View file

@ -1,7 +1,8 @@
'use strict';
const arrgv = require('arrgv');
import process from 'node:process';
function normalizeNodeArguments(fromConf = [], fromArgv = '') {
import arrgv from 'arrgv';
export default function normalizeNodeArguments(fromConf = [], fromArgv = '') {
let parsedArgv = [];
if (fromArgv !== '') {
try {
@ -13,5 +14,3 @@ function normalizeNodeArguments(fromConf = [], fromArgv = '') {
return [...process.execArgv, ...fromConf, ...parsedArgv];
}
module.exports = normalizeNodeArguments;
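
A sketch of the normalization above; the values are illustrative and the module path is an assumption, since the filename is not shown in this hunk:

import normalizeNodeArguments from './node-arguments.js'; // Assumed path, inside ava/lib.

// Configured arguments come first, then anything parsed from the CLI string.
const args = normalizeNodeArguments(['--allow-natives-syntax'], '--throw-deprecation --trace-warnings');
console.log(args); // [...process.execArgv, '--allow-natives-syntax', '--throw-deprecation', '--trace-warnings']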

View file

@ -1,15 +1,26 @@
'use strict';
function parseTestArgs(args) {
const rawTitle = typeof args[0] === 'string' ? args.shift() : undefined;
const receivedImplementationArray = Array.isArray(args[0]);
const implementations = receivedImplementationArray ? args.shift() : args.splice(0, 1);
const buildTitle = (raw, implementation, args) => {
let value = implementation && implementation.title ? implementation.title(raw, ...args) : raw;
const isValid = typeof value === 'string';
if (isValid) {
value = value.trim().replace(/\s+/g, ' ');
}
const buildTitle = implementation => {
const title = implementation.title ? implementation.title(rawTitle, ...args) : rawTitle;
return {title, isSet: typeof title !== 'undefined', isValid: typeof title === 'string', isEmpty: !title};
return {
raw,
value,
isSet: value !== undefined,
isValid,
isEmpty: !isValid || value === '',
};
};
return {args, buildTitle, implementations, rawTitle, receivedImplementationArray};
export default function parseTestArgs(args) {
const rawTitle = typeof args[0] === 'string' ? args.shift() : undefined;
const implementation = args.shift();
return {
args,
implementation: implementation && implementation.exec ? implementation.exec : implementation,
title: buildTitle(rawTitle, implementation, args),
};
}
module.exports = parseTestArgs;

2
node_modules/ava/lib/pkg.cjs generated vendored Normal file
View file

@ -0,0 +1,2 @@
'use strict';
module.exports = require('../package.json');

View file

@ -1,20 +1,30 @@
const {EventEmitter, on} = require('events');
const v8 = require('v8');
const {workerData, parentPort} = require('worker_threads');
const pkg = require('../../package.json');
import {EventEmitter, on} from 'node:events';
import process from 'node:process';
import {workerData, parentPort, threadId} from 'node:worker_threads';
// Used to forward messages received over the `parentPort`. Every subscription
// adds a listener, so do not enforce any maximums.
import pkg from '../pkg.cjs';
// Used to forward messages received over the `parentPort` and any direct ports
// to test workers. Every subscription adds a listener, so do not enforce any
// maximums.
const events = new EventEmitter().setMaxListeners(0);
const emitMessage = message => {
// Wait for a turn of the event loop, to allow new subscriptions to be
// set up in response to the previous message.
setImmediate(() => events.emit('message', message));
};
// Map of active test workers, used in receiveMessages() to get a reference to
// the TestWorker instance, and relevant release functions.
const activeTestWorkers = new Map();
const internalMessagePort = Symbol('Internal MessagePort');
class TestWorker {
constructor(id, file) {
constructor(id, file, port) {
this.id = id;
this.file = file;
this[internalMessagePort] = port;
}
teardown(fn) {
@ -47,10 +57,10 @@ class TestWorker {
}
class ReceivedMessage {
constructor(testWorker, id, serializedData) {
constructor(testWorker, id, data) {
this.testWorker = testWorker;
this.id = id;
this.data = v8.deserialize(new Uint8Array(serializedData));
this.data = data;
}
reply(data) {
@ -98,7 +108,7 @@ async function * receiveMessages(fromTestWorker, replyTo) {
let received = messageCache.get(message);
if (received === undefined) {
received = new ReceivedMessage(active.instance, message.messageId, message.serializedData);
received = new ReceivedMessage(active.instance, message.messageId, message.data);
messageCache.set(message, received);
}
@ -107,59 +117,47 @@ async function * receiveMessages(fromTestWorker, replyTo) {
}
let messageCounter = 0;
const messageIdPrefix = `${workerData.id}/message`;
const messageIdPrefix = `${threadId}/message`;
const nextMessageId = () => `${messageIdPrefix}/${++messageCounter}`;
function publishMessage(testWorker, data, replyTo) {
const id = nextMessageId();
parentPort.postMessage({
testWorker[internalMessagePort].postMessage({
type: 'message',
messageId: id,
testWorkerId: testWorker.id,
serializedData: [...v8.serialize(data)],
replyTo
data,
replyTo,
});
return {
id,
async * replies() {
yield * receiveMessages(testWorker, id);
}
},
};
}
function broadcastMessage(data) {
const id = nextMessageId();
parentPort.postMessage({
type: 'broadcast',
messageId: id,
serializedData: [...v8.serialize(data)]
});
for (const trackedWorker of activeTestWorkers.values()) {
trackedWorker.instance[internalMessagePort].postMessage({
type: 'message',
messageId: id,
data,
});
}
return {
id,
async * replies() {
yield * receiveMessages(undefined, id);
}
},
};
}
async function loadFactory() {
try {
const mod = require(workerData.filename);
if (typeof mod === 'function') {
return mod;
}
return mod.default;
} catch (error) {
if (error && (error.code === 'ERR_REQUIRE_ESM' || (error.code === 'MODULE_NOT_FOUND' && workerData.filename.startsWith('file://')))) {
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
return factory;
}
throw error;
}
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
return factory;
}
let signalAvailable = () => {
@ -175,7 +173,7 @@ loadFactory(workerData.filename).then(factory => {
factory({
negotiateProtocol(supported) {
if (!supported.includes('experimental')) {
if (!supported.includes('ava-4')) {
fatal = new Error(`This version of AVA (${pkg.version}) is not compatible with shared worker plugin at ${workerData.filename}`);
throw fatal;
}
@ -184,12 +182,13 @@ loadFactory(workerData.filename).then(factory => {
parentPort.on('message', async message => {
if (message.type === 'register-test-worker') {
const {id, file} = message;
const instance = new TestWorker(id, file);
const {id, file, port} = message;
const instance = new TestWorker(id, file, port);
activeTestWorkers.set(id, {instance, teardownFns: new Set()});
produceTestWorker(instance);
port.on('message', message => emitMessage({testWorkerId: id, ...message}));
}
if (message.type === 'deregister-test-worker') {
@ -205,18 +204,16 @@ loadFactory(workerData.filename).then(factory => {
parentPort.postMessage({
type: 'deregistered-test-worker',
id
id,
});
}
// Wait for a turn of the event loop, to allow new subscriptions to be
// set up in response to the previous message.
setImmediate(() => events.emit('message', message));
emitMessage(message);
}
});
return {
initialData: workerData.initialData,
protocol: 'experimental',
protocol: 'ava-4',
ready() {
signalAvailable();
@ -235,9 +232,9 @@ loadFactory(workerData.filename).then(factory => {
for await (const [worker] of on(events, 'testWorker')) {
yield worker;
}
}
},
};
}
},
});
}).catch(error => {
if (fatal === undefined) {
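
For reference, the shape of a shared worker plugin this loader can host, negotiating the new 'ava-4' protocol. Only negotiateProtocol, initialData, ready() and the test-worker iteration are visible above; the testWorkers() name follows AVA 4's shared-worker documentation and is otherwise an assumption:

export default async ({negotiateProtocol}) => {
  // The loader above rejects any plugin whose supported list does not include 'ava-4'.
  const protocol = negotiateProtocol(['ava-4']);
  console.log('initial data from the test side:', protocol.initialData);

  protocol.ready();

  // Handle test workers as they register with this shared worker.
  for await (const testWorker of protocol.testWorkers()) {
    testWorker.teardown(() => {
      // Clean up any per-worker state once that test worker exits.
    });
  }
};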

View file

@ -1,14 +1,11 @@
const events = require('events');
const serializeError = require('../serialize-error');
import events from 'node:events';
import {pathToFileURL} from 'node:url';
import {Worker} from 'node:worker_threads';
let Worker;
try {
({Worker} = require('worker_threads'));
} catch {}
import serializeError from '../serialize-error.js';
const LOADER = require.resolve('./shared-worker-loader');
const LOADER = new URL('shared-worker-loader.js', import.meta.url);
let sharedWorkerCounter = 0;
const launchedWorkers = new Map();
const waitForAvailable = async worker => {
@ -19,30 +16,28 @@ const waitForAvailable = async worker => {
}
};
function launchWorker({filename, initialData}) {
function launchWorker(filename, initialData) {
if (launchedWorkers.has(filename)) {
return launchedWorkers.get(filename);
}
const id = `shared-worker/${++sharedWorkerCounter}`;
const worker = new Worker(LOADER, {
// Ensure the worker crashes for unhandled rejections, rather than allowing undefined behavior.
execArgv: ['--unhandled-rejections=strict'],
workerData: {
filename,
id,
initialData
}
initialData,
},
});
worker.setMaxListeners(0);
const launched = {
statePromises: {
available: waitForAvailable(worker),
error: events.once(worker, 'error').then(([error]) => error) // eslint-disable-line promise/prefer-await-to-then
error: events.once(worker, 'error').then(([error]) => error),
},
exited: false,
worker
worker,
};
launchedWorkers.set(filename, launched);
@ -53,7 +48,7 @@ function launchWorker({filename, initialData}) {
return launched;
}
async function observeWorkerProcess(fork, runStatus) {
export async function observeWorkerProcess(fork, runStatus) {
let registrationCount = 0;
let signalDeregistered;
const deregistered = new Promise(resolve => {
@ -66,26 +61,11 @@ async function observeWorkerProcess(fork, runStatus) {
}
});
fork.onConnectSharedWorker(async channel => {
const launched = launchWorker(channel);
const handleChannelMessage = ({messageId, replyTo, serializedData}) => {
launched.worker.postMessage({
type: 'message',
testWorkerId: fork.forkId,
messageId,
replyTo,
serializedData
});
};
fork.onConnectSharedWorker(async ({filename, initialData, port, signalError}) => {
const launched = launchWorker(filename, initialData);
const handleWorkerMessage = async message => {
if (message.type === 'broadcast' || (message.type === 'message' && message.testWorkerId === fork.forkId)) {
const {messageId, replyTo, serializedData} = message;
channel.forwardMessageToFork({messageId, replyTo, serializedData});
}
if (message.type === 'deregistered-test-worker' && message.id === fork.forkId) {
if (message.type === 'deregistered-test-worker' && message.id === fork.threadId) {
launched.worker.off('message', handleWorkerMessage);
registrationCount--;
@ -95,35 +75,35 @@ async function observeWorkerProcess(fork, runStatus) {
}
};
launched.statePromises.error.then(error => { // eslint-disable-line promise/prefer-await-to-then
launched.statePromises.error.then(error => {
signalDeregistered();
launched.worker.off('message', handleWorkerMessage);
runStatus.emitStateChange({type: 'shared-worker-error', err: serializeError('Shared worker error', true, error)});
channel.signalError();
signalError();
});
try {
await launched.statePromises.available;
registrationCount++;
port.postMessage({type: 'ready'});
launched.worker.postMessage({
type: 'register-test-worker',
id: fork.forkId,
file: fork.file
});
id: fork.threadId,
file: pathToFileURL(fork.file).toString(),
port,
}, [port]);
fork.promise.finally(() => {
launched.worker.postMessage({
type: 'deregister-test-worker',
id: fork.forkId
id: fork.threadId,
});
channel.off('message', handleChannelMessage);
});
launched.worker.on('message', handleWorkerMessage);
channel.on('message', handleChannelMessage);
channel.signalReady();
} catch {
return;
} finally {
@ -136,5 +116,3 @@ async function observeWorkerProcess(fork, runStatus) {
return deregistered;
}
exports.observeWorkerProcess = observeWorkerProcess;

View file

@ -1,21 +1,21 @@
const pkg = require('../package.json');
const globs = require('./globs');
import * as globs from './globs.js';
import pkg from './pkg.cjs';
const levels = {
ava3: 1,
pathRewrites: 2
// As the protocol changes, comparing levels by integer allows AVA to be
// compatible with different versions. Currently there is only one supported
// version, so this is effectively unused. The infrastructure is retained for
// future use.
levelIntegersAreCurrentlyUnused: 0,
};
exports.levels = levels;
const levelsByProtocol = {
'ava-3': levels.ava3,
'ava-3.2': levels.pathRewrites
'ava-3.2': levels.levelIntegersAreCurrentlyUnused,
};
function load(providerModule, projectDir) {
async function load(providerModule, projectDir) {
const ava = {version: pkg.version};
const makeProvider = require(providerModule);
const {default: makeProvider} = await import(providerModule); // eslint-disable-line node/no-unsupported-features/es-syntax
let fatal;
let level;
@ -37,9 +37,9 @@ function load(providerModule, projectDir) {
},
identifier,
normalizeGlobPatterns: globs.normalizePatterns,
projectDir
projectDir,
};
}
},
});
if (fatal) {
@ -49,5 +49,11 @@ function load(providerModule, projectDir) {
return {...provider, level};
}
exports.babel = projectDir => load('@ava/babel', projectDir);
exports.typescript = projectDir => load('@ava/typescript', projectDir);
const providerManager = {
levels,
async typescript(projectDir) {
return load('@ava/typescript', projectDir);
},
};
export default providerManager;
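
The worker shown earlier consumes this manager as follows; a standalone sketch, where the @ava/typescript config shape is an assumption taken from that provider's documentation rather than from this diff:

import providerManager from './provider-manager.js'; // Assumes we are inside ava/lib.

// Dynamically import @ava/typescript and hand it its configuration.
const {level, main} = await providerManager.typescript('/path/to/project');
const provider = main({config: {rewritePaths: {'src/': 'build/'}, compile: false}});

// Configured providers can rewrite the test globs, as globs.js does above.
console.log(provider.updateGlobs({
  filePatterns: ['test/**/*.test.js'],
  ignoredByWatcherPatterns: [],
}));
console.log(level);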

View file

@ -1,16 +1,10 @@
'use strict';
const StackUtils = require('stack-utils');
import StackUtils from 'stack-utils';
const stackUtils = new StackUtils({
ignoredPackages: [
'@ava/babel',
'@ava/require-precompiled',
'@ava/typescript',
'append-transform',
'ava',
'empower-core',
'esm',
'nyc'
'nyc',
],
internals: [
// AVA internals, which ignoredPackages don't ignore when we run our own unit tests.
@ -20,8 +14,8 @@ const stackUtils = new StackUtils({
/\(internal\/process\/task_queues\.js:\d+:\d+\)$/,
/\(internal\/modules\/cjs\/.+?\.js:\d+:\d+\)$/,
/async Promise\.all \(index/,
/new Promise \(<anonymous>\)/
]
/new Promise \(<anonymous>\)/,
],
});
/*
@ -60,7 +54,7 @@ const stackUtils = new StackUtils({
* Module.runMain (module.js:604:10)
* ```
*/
module.exports = stack => {
export default function beautifyStack(stack) {
if (!stack) {
return [];
}
@ -70,4 +64,4 @@ module.exports = stack => {
.split('\n')
.map(line => line.trim())
.filter(line => line !== '');
};
}
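
A sketch of the cleaned-up stack helper; the output description is indicative only:

import beautifyStack from './beautify-stack.js'; // Assumes we are inside ava/lib.

// Node internals and AVA's own frames are stripped; what remains is an array of
// trimmed, non-empty stack lines pointing at user code.
console.log(beautifyStack(new Error('boom').stack));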

View file

@ -1,17 +1,42 @@
'use strict';
const chalk = require('../chalk').get();
import {chalk} from '../chalk.js';
module.exports = {
log: chalk.gray,
title: chalk.bold,
error: chalk.red,
skip: chalk.yellow,
todo: chalk.blue,
pass: chalk.green,
duration: chalk.gray.dim,
errorSource: chalk.gray,
errorStack: chalk.gray,
errorStackInternal: chalk.gray.dim,
stack: chalk.red,
information: chalk.magenta
const colors = {
get log() {
return chalk.gray;
},
get title() {
return chalk.bold;
},
get error() {
return chalk.red;
},
get skip() {
return chalk.yellow;
},
get todo() {
return chalk.blue;
},
get pass() {
return chalk.green;
},
get duration() {
return chalk.gray.dim;
},
get errorSource() {
return chalk.gray;
},
get errorStack() {
return chalk.gray;
},
get errorStackInternal() {
return chalk.gray.dim;
},
get stack() {
return chalk.red;
},
get information() {
return chalk.magenta;
},
};
export default colors;

View file

@ -1,25 +1,24 @@
'use strict';
const os = require('os');
const path = require('path');
const stream = require('stream');
import os from 'node:os';
import path from 'node:path';
import stream from 'node:stream';
import {fileURLToPath} from 'node:url';
const cliCursor = require('cli-cursor');
const figures = require('figures');
const indentString = require('indent-string');
const ora = require('ora');
const plur = require('plur');
const prettyMs = require('pretty-ms');
const trimOffNewlines = require('trim-off-newlines');
import figures from 'figures';
import indentString from 'indent-string';
import plur from 'plur';
import prettyMs from 'pretty-ms';
import StackUtils from 'stack-utils';
const chalk = require('../chalk').get();
const codeExcerpt = require('../code-excerpt');
const beautifyStack = require('./beautify-stack');
const colors = require('./colors');
const formatSerializedError = require('./format-serialized-error');
const improperUsageMessages = require('./improper-usage-messages');
const prefixTitle = require('./prefix-title');
import {chalk} from '../chalk.js';
import codeExcerpt from '../code-excerpt.js';
const nodeInternals = require('stack-utils').nodeInternals();
import beautifyStack from './beautify-stack.js';
import colors from './colors.js';
import formatSerializedError from './format-serialized-error.js';
import improperUsageMessage from './improper-usage-messages.js';
import prefixTitle from './prefix-title.js';
const nodeInternals = StackUtils.nodeInternals();
class LineWriter extends stream.Writable {
constructor(dest) {
@ -52,129 +51,48 @@ class LineWriter extends stream.Writable {
}
}
class LineWriterWithSpinner extends LineWriter {
constructor(dest, spinner) {
super(dest);
this.lastSpinnerText = '';
this.spinner = spinner;
}
_write(chunk, _, callback) {
this.spinner.clear();
this._writeWithSpinner(chunk.toString('utf8'));
callback();
}
_writev(pieces, callback) {
// Discard the current spinner output. Any lines that were meant to be
// preserved should be rewritten.
this.spinner.clear();
const last = pieces.pop();
for (const piece of pieces) {
this.dest.write(piece.chunk);
}
this._writeWithSpinner(last.chunk.toString('utf8'));
callback();
}
_writeWithSpinner(string) {
if (!this.spinner.isSpinning) {
this.dest.write(string);
return;
}
this.lastSpinnerText = string;
// Ignore whitespace at the end of the chunk. We're continuously rewriting
// the last line through the spinner. Also be careful to remove the indent
// as the spinner adds its own.
this.spinner.text = string.trimEnd().slice(2);
this.spinner.render();
}
}
function manageCorking(stream) {
let corked = false;
const cork = () => {
corked = true;
stream.cork();
};
const uncork = () => {
corked = false;
stream.uncork();
};
return {
decorateFlushingWriter(fn) {
decorateWriter(fn) {
return function (...args) {
if (corked) {
stream.uncork();
}
stream.cork();
try {
return fn.apply(this, args);
} finally {
if (corked) {
stream.cork();
}
stream.uncork();
}
};
},
decorateWriter(fn) {
return function (...args) {
cork();
try {
return fn.apply(this, args);
} finally {
uncork();
}
};
}
};
}
class Reporter {
export default class Reporter {
constructor({
verbose,
extensions,
reportStream,
stdStream,
projectDir,
watching,
spinner,
durationThreshold
durationThreshold,
}) {
this.verbose = verbose;
this.extensions = extensions;
this.reportStream = reportStream;
this.stdStream = stdStream;
this.watching = watching;
this.relativeFile = file => path.relative(projectDir, file);
this.relativeFile = file => {
if (file.startsWith('file://')) {
file = fileURLToPath(file);
}
const {decorateWriter, decorateFlushingWriter} = manageCorking(this.reportStream);
return path.relative(projectDir, file);
};
const {decorateWriter} = manageCorking(this.reportStream);
this.consumeStateChange = decorateWriter(this.consumeStateChange);
this.endRun = decorateWriter(this.endRun);
if (this.verbose) {
this.durationThreshold = durationThreshold || 100;
this.spinner = null;
this.clearSpinner = () => {};
this.lineWriter = new LineWriter(this.reportStream);
} else {
this.spinner = ora({
isEnabled: true,
color: spinner ? spinner.color : 'gray',
discardStdin: !watching,
hideCursor: false,
spinner: spinner || (process.platform === 'win32' ? 'line' : 'dots'),
stream: reportStream
});
this.clearSpinner = decorateFlushingWriter(this.spinner.clear.bind(this.spinner));
this.lineWriter = new LineWriterWithSpinner(this.reportStream, this.spinner);
}
this.durationThreshold = durationThreshold || 100;
this.lineWriter = new LineWriter(this.reportStream);
this.reset();
}
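AVA 4 workers report test files as file:// URLs (see makeFileURL in runner.js below), so the reporter converts them back to paths before relativizing. A standalone sketch with an invented project directory:
import path from 'node:path';
import {fileURLToPath} from 'node:url';
const projectDir = '/home/user/project'; // Hypothetical
const relativeFile = file => {
	if (file.startsWith('file://')) {
		file = fileURLToPath(file);
	}
	return path.relative(projectDir, file);
};
relativeFile('file:///home/user/project/test/foo.js'); //=> 'test/foo.js'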
@ -198,7 +116,6 @@ class Reporter {
this.sharedWorkerErrors = [];
this.uncaughtExceptions = [];
this.unhandledRejections = [];
this.unsavedSnapshots = [];
this.previousFailures = 0;
@ -221,9 +138,10 @@ class Reporter {
this.matching = plan.matching;
this.previousFailures = plan.previousFailures;
this.emptyParallelRun = plan.status.emptyParallelRun;
this.selectionInsights = plan.status.selectionInsights;
if (this.watching || plan.files.length > 1) {
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
this.prefixTitle = (testFile, title) => prefixTitle(this.extensions, plan.filePathPrefix, testFile, title);
}
this.removePreviousListener = plan.status.on('stateChange', evt => {
@ -234,13 +152,7 @@ class Reporter {
this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
}
if (this.spinner === null) {
this.lineWriter.writeLine();
} else {
cliCursor.hide(this.reportStream);
this.lineWriter.writeLine();
this.spinner.start();
}
this.lineWriter.writeLine();
}
consumeStateChange(event) { // eslint-disable-line complexity
@ -296,12 +208,10 @@ class Reporter {
this.write(colors.error(`${figures.cross} Internal error`));
}
if (this.verbose) {
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
this.lineWriter.writeLine();
this.lineWriter.writeLine();
break;
}
@ -321,7 +231,7 @@ class Reporter {
}
case 'hook-finished': {
if (this.verbose && event.logs.length > 0) {
if (true && event.logs.length > 0) {
this.lineWriter.writeLine(` ${this.prefixTitle(event.testFile, event.title)}`);
this.writeLogs(event);
}
@ -330,12 +240,10 @@ class Reporter {
}
case 'selected-test': {
if (this.verbose) {
if (event.skip) {
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
} else if (event.todo) {
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
}
if (event.skip) {
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
} else if (event.todo) {
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
}
break;
@ -344,29 +252,21 @@ class Reporter {
case 'shared-worker-error': {
this.sharedWorkerErrors.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
case 'snapshot-error':
this.unsavedSnapshots.push(event);
break;
case 'uncaught-exception': {
this.uncaughtExceptions.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
@ -374,12 +274,10 @@ class Reporter {
case 'unhandled-rejection': {
this.unhandledRejections.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
@ -389,8 +287,12 @@ class Reporter {
this.filesWithoutDeclaredTests.add(event.testFile);
}
if (this.verbose && !this.filesWithMissingAvaImports.has(event.testFile)) {
if (event.nonZeroExitCode) {
if (!this.filesWithMissingAvaImports.has(event.testFile)) {
if (event.err) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to an error:`));
this.lineWriter.writeLine();
this.writeErr(event);
} else if (event.nonZeroExitCode) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited with a non-zero exit code: ${event.nonZeroExitCode}`));
} else {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to ${event.signal}`));
@ -410,7 +312,7 @@ class Reporter {
this.filesWithoutMatchedLineNumbers.add(event.testFile);
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(event.testFile)} did not match any tests`));
} else if (this.verbose && !this.failFastEnabled && fileStats.remainingTests > 0) {
} else if (true && !this.failFastEnabled && fileStats.remainingTests > 0) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(event.testFile)}`));
}
}
@ -419,9 +321,6 @@ class Reporter {
}
case 'worker-stderr': {
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
this.clearSpinner();
this.stdStream.write(event.chunk);
// If the chunk does not end with a linebreak, *forcibly* write one to
// ensure it remains visible in the TTY.
@ -433,17 +332,10 @@ class Reporter {
this.reportStream.write(os.EOL);
}
if (this.spinner !== null) {
this.lineWriter.write(this.lineWriter.lastSpinnerText);
}
break;
}
case 'worker-stdout': {
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
this.clearSpinner();
this.stdStream.write(event.chunk);
// If the chunk does not end with a linebreak, *forcibly* write one to
// ensure it remains visible in the TTY.
@ -454,10 +346,6 @@ class Reporter {
if (event.chunk[event.chunk.length - 1] !== 0x0A) {
this.reportStream.write(os.EOL);
}
if (this.spinner !== null) {
this.lineWriter.write(this.lineWriter.lastSpinnerText);
}
}
}
}
@ -478,11 +366,7 @@ class Reporter {
}
write(string) {
if (this.verbose) {
this.lineWriter.writeLine(string);
} else {
this.writeWithCounts(string);
}
this.lineWriter.writeLine(string);
}
writeWithCounts(string) {
@ -529,7 +413,7 @@ class Reporter {
writeErr(event) {
if (event.err.name === 'TSError' && event.err.object && event.err.object.diagnosticText) {
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(event.err.object.diagnosticText)));
this.lineWriter.writeLine(colors.errorStack(event.err.object.diagnosticText));
this.lineWriter.writeLine();
return;
}
@ -556,13 +440,13 @@ class Reporter {
this.lineWriter.writeLine();
}
const message = improperUsageMessages.forError(event.err);
const message = improperUsageMessage(event.err);
if (message) {
this.lineWriter.writeLine(message);
this.lineWriter.writeLine();
}
} else if (event.err.nonErrorObject) {
this.lineWriter.writeLine(trimOffNewlines(event.err.formatted));
this.lineWriter.writeLine(event.err.formatted);
this.lineWriter.writeLine();
} else {
this.lineWriter.writeLine(event.err.summary);
@ -618,27 +502,15 @@ class Reporter {
writeTestSummary(event) {
if (event.type === 'hook-failed' || event.type === 'test-failed') {
if (this.verbose) {
this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
} else {
this.write(this.prefixTitle(event.testFile, event.title));
}
this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
} else if (event.knownFailing) {
if (this.verbose) {
this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
} else {
this.write(colors.error(this.prefixTitle(event.testFile, event.title)));
}
} else if (this.verbose) {
this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
} else {
const duration = event.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(event.duration) + ')') : '';
this.write(`${colors.pass(figures.tick)} ${this.prefixTitle(event.testFile, event.title)}${duration}`);
} else {
this.write(this.prefixTitle(event.testFile, event.title));
}
if (this.verbose) {
this.writeLogs(event);
}
this.writeLogs(event);
}
writeFailure(event) {
@ -652,19 +524,38 @@ class Reporter {
endRun() {// eslint-disable-line complexity
let firstLinePostfix = this.watching ? ` ${chalk.gray.dim(`[${new Date().toLocaleTimeString('en-US', {hour12: false})}]`)}` : '';
let wroteSomething = false;
if (!this.verbose) {
this.spinner.stop();
cliCursor.show(this.reportStream);
} else if (this.emptyParallelRun) {
if (this.emptyParallelRun) {
this.lineWriter.writeLine('No files tested in this parallel run');
this.lineWriter.writeLine();
return;
}
if (!this.stats) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test` + firstLinePostfix));
if (this.selectionInsights.ignoredFilterPatternFiles.length > 0) {
this.write(colors.information(`${figures.warning} Paths for additional test files were disregarded:`));
this.lineWriter.writeLine();
for (const pattern of this.selectionInsights.ignoredFilterPatternFiles) {
this.lineWriter.writeLine(chalk.magenta(`* ${pattern}`));
}
this.lineWriter.writeLine();
this.write(colors.information('Files starting with underscores are never treated as test files.'));
this.write(colors.information('Files handled by @ava/typescript can only be selected if your configuration already selects them.'));
this.lineWriter.writeLine();
}
if (this.selectionInsights.selectionCount === 0) {
if (this.selectionInsights.testFileCount === 0) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test` + firstLinePostfix));
} else {
const {testFileCount: count} = this.selectionInsights;
this.lineWriter.writeLine(colors.error(`${figures.cross} Based on your configuration, ${count} test ${plur('file was', 'files were', count)} found, but did not match the CLI arguments:` + firstLinePostfix));
this.lineWriter.writeLine();
for (const {pattern} of this.selectionInsights.filter) {
this.lineWriter.writeLine(colors.error(`* ${pattern}`));
}
}
this.lineWriter.writeLine();
return;
}
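A hedged illustration of the selection-insight output above; the field names come from the code, the values are invented:
const selectionInsights = {
	ignoredFilterPatternFiles: ['src/_helpers.js'], // CLI paths that are never test files
	selectionCount: 0, // Files matching the CLI arguments
	testFileCount: 2, // Test files found by the configuration
	filter: [{pattern: 'test/does-not-exist.js'}],
};
// With selectionCount === 0 and testFileCount > 0 the reporter prints:
// ✘ Based on your configuration, 2 test files were found, but did not match the CLI arguments:
// * test/does-not-exist.js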
@ -675,53 +566,8 @@ class Reporter {
return;
}
if (this.verbose) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
} else {
if (this.filesWithMissingAvaImports.size > 0) {
for (const testFile of this.filesWithMissingAvaImports) {
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
if (this.filesWithoutDeclaredTests.size > 0) {
for (const testFile of this.filesWithoutDeclaredTests) {
if (!this.filesWithMissingAvaImports.has(testFile)) {
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
}
if (this.lineNumberErrors.length > 0) {
for (const event of this.lineNumberErrors) {
this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection` + firstLinePostfix));
firstLinePostfix = '';
wroteSomething = true;
}
}
if (this.filesWithoutMatchedLineNumbers.size > 0) {
for (const testFile of this.filesWithoutMatchedLineNumbers) {
if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
}
if (wroteSomething) {
this.lineWriter.writeLine();
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
wroteSomething = false;
}
}
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
if (this.failures.length > 0) {
const writeTrailingLines = this.internalErrors.length > 0 || this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
@ -732,106 +578,13 @@ class Reporter {
if (event !== lastFailure) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
} else if (!this.verbose && writeTrailingLines) {
} else if (!true && writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
if (this.verbose) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
}
if (!this.verbose) {
if (this.internalErrors.length > 0) {
const writeTrailingLines = this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
const last = this.internalErrors[this.internalErrors.length - 1];
for (const event of this.internalErrors) {
if (event.testFile) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
} else {
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
}
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
if (event !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.sharedWorkerErrors.length > 0) {
const writeTrailingLines = this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
const last = this.sharedWorkerErrors[this.sharedWorkerErrors.length - 1];
for (const evt of this.sharedWorkerErrors) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(evt.err);
if (evt !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.uncaughtExceptions.length > 0) {
const writeTrailingLines = this.unhandledRejections.length > 0;
const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
for (const event of this.uncaughtExceptions) {
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
if (event !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.unhandledRejections.length > 0) {
const last = this.unhandledRejections[this.unhandledRejections.length - 1];
for (const event of this.unhandledRejections) {
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
if (event !== last) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (wroteSomething) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
}
if (this.unsavedSnapshots.length > 0) {
this.lineWriter.writeLine(colors.title('Could not update snapshots for the following test files:'));
this.lineWriter.writeLine();
for (const event of this.unsavedSnapshots) {
this.lineWriter.writeLine(`${figures.warning} ${this.relativeFile(event.testFile)}`);
}
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
@ -853,16 +606,14 @@ class Reporter {
}
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
if (this.verbose) {
this.lineWriter.writeLine();
}
this.lineWriter.writeLine();
}
if (this.verbose && this.stats.parallelRuns) {
if (this.stats.parallelRuns) {
const {
currentFileCount,
currentIndex,
totalRuns
totalRuns,
} = this.stats.parallelRuns;
this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
this.lineWriter.writeLine();
@ -879,11 +630,11 @@ class Reporter {
}
if (
this.stats.failedHooks === 0 &&
this.stats.failedTests === 0 &&
this.stats.passedTests > 0
this.stats.failedHooks === 0
&& this.stats.failedTests === 0
&& this.stats.passedTests > 0
) {
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix,
);
firstLinePostfix = '';
}
@ -917,4 +668,3 @@ class Reporter {
}
}
}
module.exports = Reporter;

View file

@ -1,27 +1,16 @@
'use strict';
const trimOffNewlines = require('trim-off-newlines');
const chalk = require('../chalk').get();
export default function formatSerializedError(error) {
const printMessage = error.values.length === 0
? Boolean(error.message)
: !error.values[0].label.startsWith(error.message);
function formatSerializedError(error) {
const printMessage = error.values.length === 0 ?
Boolean(error.message) :
!error.values[0].label.startsWith(error.message);
if (error.statements.length === 0 && error.values.length === 0) {
if (error.values.length === 0) {
return {formatted: null, printMessage};
}
let formatted = '';
for (const value of error.values) {
formatted += `${value.label}\n\n${trimOffNewlines(value.formatted)}\n\n`;
formatted += `${value.label}\n\n${value.formatted}\n\n`;
}
for (const statement of error.statements) {
formatted += `${statement[0]}\n${chalk.grey('=>')} ${trimOffNewlines(statement[1])}\n\n`;
}
formatted = trimOffNewlines(formatted);
return {formatted, printMessage};
return {formatted: formatted.trim(), printMessage};
}
module.exports = formatSerializedError;
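For illustration only, a hypothetical call showing the shape the rewritten formatSerializedError consumes and returns:
formatSerializedError({
	message: 'Difference:',
	values: [{label: 'Difference:', formatted: '- foo\n+ bar\n'}],
});
//=> {formatted: 'Difference:\n\n- foo\n+ bar', printMessage: false}
// printMessage is false because the first value's label already starts with the message.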

View file

@ -1,8 +1,7 @@
'use strict';
const chalk = require('../chalk').get();
const pkg = require('../../package.json');
import {chalk} from '../chalk.js';
import pkg from '../pkg.cjs';
exports.forError = error => {
export default function buildMessage(error) {
if (!error.improperUsage) {
return null;
}
@ -21,7 +20,7 @@ Visit the following URL for more details:
if (assertion === 'snapshot') {
const {name, snapPath} = error.improperUsage;
if (name === 'ChecksumError') {
if (name === 'ChecksumError' || name === 'InvalidSnapshotError') {
return `The snapshot file is corrupted.
File path: ${chalk.yellow(snapPath)}
@ -39,9 +38,9 @@ Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrad
if (name === 'VersionMismatchError') {
const {snapVersion, expectedVersion} = error.improperUsage;
const upgradeMessage = snapVersion < expectedVersion ?
`Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.` :
'You should upgrade AVA.';
const upgradeMessage = snapVersion < expectedVersion
? `Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.`
: 'You should upgrade AVA.';
return `The snapshot file is v${snapVersion}, but only v${expectedVersion} is supported.
@ -52,4 +51,4 @@ ${upgradeMessage}`;
}
return null;
};
}

View file

@ -1,21 +1,23 @@
'use strict';
const path = require('path');
const figures = require('figures');
const chalk = require('../chalk').get();
import path from 'node:path';
const SEPERATOR = ' ' + chalk.gray.dim(figures.pointerSmall) + ' ';
import figures from 'figures';
module.exports = (base, file, title) => {
const prefix = file
import {chalk} from '../chalk.js';
const SEPARATOR = ' ' + chalk.gray.dim(figures.pointerSmall) + ' ';
export default function prefixTitle(extensions, base, file, title) {
const parts = file
// Only replace base if it is found at the start of the path
.replace(base, (match, offset) => offset === 0 ? '' : match)
.replace(/\.spec/, '')
.replace(/\.test/, '')
.replace(/test-/g, '')
.replace(/\.js$/, '')
.split(path.sep)
.filter(p => p !== '__tests__')
.join(SEPERATOR);
.filter(p => p !== '__tests__');
return prefix + SEPERATOR + title;
};
const filename = parts.pop()
.replace(/\.spec\./, '.')
.replace(/\.test\./, '.')
.replace(/test-/, '')
.replace(new RegExp(`.(${extensions.join('|')})$`), '');
return [...parts, filename, title].join(SEPARATOR);
}
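A hypothetical call (extensions and paths invented, POSIX separators assumed, colors omitted) showing how the rewritten prefixTitle strips the configured extension instead of hard-coding .js:
prefixTitle(['js', 'cjs'], 'test/', 'test/unit/parser.test.js', 'parses input');
//=> 'unit › parser › parses input'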

View file

@ -1,14 +1,13 @@
'use strict';
const os = require('os');
const path = require('path');
import os from 'node:os';
import path from 'node:path';
const plur = require('plur');
const stripAnsi = require('strip-ansi');
const supertap = require('supertap');
const indentString = require('indent-string');
import indentString from 'indent-string';
import plur from 'plur';
import stripAnsi from 'strip-ansi';
import supertap from 'supertap';
const beautifyStack = require('./beautify-stack');
const prefixTitle = require('./prefix-title');
import beautifyStack from './beautify-stack.js';
import prefixTitle from './prefix-title.js';
function dumpError(error) {
const object = {...error.object};
@ -30,10 +29,7 @@ function dumpError(error) {
}
if (error.values.length > 0) {
object.values = error.values.reduce((acc, value) => { // eslint-disable-line unicorn/no-reduce
acc[value.label] = stripAnsi(value.formatted);
return acc;
}, {});
object.values = Object.fromEntries(error.values.map(({label, formatted}) => [label, stripAnsi(formatted)]));
}
}
@ -49,10 +45,11 @@ function dumpError(error) {
return object;
}
class TapReporter {
export default class TapReporter {
constructor(options) {
this.i = 0;
this.extensions = options.extensions;
this.stdStream = options.stdStream;
this.reportStream = options.reportStream;
@ -65,7 +62,7 @@ class TapReporter {
startRun(plan) {
if (plan.files.length > 1) {
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
this.prefixTitle = (testFile, title) => prefixTitle(this.extensions, plan.filePathPrefix, testFile, title);
}
plan.status.on('stateChange', evt => this.consumeStateChange(evt));
@ -80,7 +77,7 @@ class TapReporter {
failed: this.stats.failedTests + this.stats.remainingTests,
passed: this.stats.passedTests + this.stats.passedKnownFailingTests,
skipped: this.stats.skippedTests,
todo: this.stats.todoTests
todo: this.stats.todoTests,
}) + os.EOL);
if (this.stats.parallelRuns) {
@ -93,7 +90,7 @@ class TapReporter {
failed: 0,
passed: 0,
skipped: 0,
todo: 0
todo: 0,
}) + os.EOL);
}
}
@ -105,7 +102,7 @@ class TapReporter {
index: ++this.i,
passed: flags.passed,
skip: flags.skip,
todo: flags.todo
todo: flags.todo,
}) + os.EOL);
}
@ -117,7 +114,7 @@ class TapReporter {
index: ++this.i,
passed: false,
skip: false,
todo: false
todo: false,
}) + os.EOL);
}
@ -132,11 +129,11 @@ class TapReporter {
}
writeTimeout(evt) {
const err = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
const error = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
for (const [testFile, tests] of evt.pendingTests) {
for (const title of tests) {
this.writeTest({testFile, title, err}, {passed: false, todo: false, skip: false});
this.writeTest({testFile, title, err: error}, {passed: false, todo: false, skip: false});
}
}
}
@ -168,9 +165,6 @@ class TapReporter {
this.writeTest(evt, {passed: false, todo: true, skip: false});
}
break;
case 'snapshot-error':
this.writeComment(evt, {title: 'Could not update snapshots'});
break;
case 'stats':
this.stats = evt.stats;
@ -219,4 +213,3 @@ class TapReporter {
}
}
}
module.exports = TapReporter;

52
node_modules/ava/lib/run-status.js generated vendored
View file

@ -1,17 +1,21 @@
'use strict';
const Emittery = require('emittery');
const cloneDeep = require('lodash/cloneDeep');
import v8 from 'node:v8';
class RunStatus extends Emittery {
constructor(files, parallelRuns) {
import Emittery from 'emittery';
const copyStats = stats => v8.deserialize(v8.serialize(stats));
export default class RunStatus extends Emittery {
constructor(files, parallelRuns, selectionInsights) {
super();
this.pendingTests = new Map();
this.emptyParallelRun = parallelRuns &&
parallelRuns.currentFileCount === 0 &&
parallelRuns.totalRuns > 1 &&
files > 0;
this.emptyParallelRun = parallelRuns
&& parallelRuns.currentFileCount === 0
&& parallelRuns.totalRuns > 1
&& files > 0;
this.selectionInsights = selectionInsights;
this.stats = {
byFile: new Map(),
@ -32,7 +36,7 @@ class RunStatus extends Emittery {
timeouts: 0,
todoTests: 0,
uncaughtExceptions: 0,
unhandledRejections: 0
unhandledRejections: 0,
};
}
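The lodash cloneDeep dependency is replaced by a structured clone through the v8 serializer, which also round-trips the Map kept in stats.byFile. Minimal sketch:
import v8 from 'node:v8';
const copyStats = stats => v8.deserialize(v8.serialize(stats));
const stats = {byFile: new Map([['test/a.js', {passedTests: 1}]]), passedTests: 1};
const copy = copyStats(stats);
copy.byFile !== stats.byFile && copy.byFile.get('test/a.js').passedTests === 1; //=> true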
@ -51,7 +55,7 @@ class RunStatus extends Emittery {
todoTests: 0,
uncaughtExceptions: 0,
unhandledRejections: 0,
...stats
...stats,
});
this.pendingTests.set(testFile, new Set());
@ -147,7 +151,7 @@ class RunStatus extends Emittery {
}
if (changedStats) {
this.emit('stateChange', {type: 'stats', stats: cloneDeep(stats)});
this.emit('stateChange', {type: 'stats', stats: copyStats(stats)});
}
this.emit('stateChange', event);
@ -163,15 +167,15 @@ class RunStatus extends Emittery {
}
if (
this.stats.declaredTests === 0 ||
this.stats.internalErrors > 0 ||
this.stats.failedHooks > 0 ||
this.stats.failedTests > 0 ||
this.stats.failedWorkers > 0 ||
this.stats.sharedWorkerErrors > 0 ||
this.stats.timeouts > 0 ||
this.stats.uncaughtExceptions > 0 ||
this.stats.unhandledRejections > 0
this.stats.declaredTests === 0
|| this.stats.internalErrors > 0
|| this.stats.failedHooks > 0
|| this.stats.failedTests > 0
|| this.stats.failedWorkers > 0
|| this.stats.sharedWorkerErrors > 0
|| this.stats.timeouts > 0
|| this.stats.uncaughtExceptions > 0
|| this.stats.unhandledRejections > 0
) {
return 1;
}
@ -194,6 +198,8 @@ class RunStatus extends Emittery {
this.pendingTests.get(event.testFile).delete(event.title);
}
}
}
module.exports = RunStatus;
getFailedTestFiles() {
return [...this.stats.byFile].filter(statByFile => statByFile[1].failedTests).map(statByFile => statByFile[0]);
}
}
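Illustration of getFailedTestFiles with invented per-file stats, matching the shape kept in stats.byFile above:
const byFile = new Map([
	['test/a.js', {failedTests: 2, passedTests: 1}],
	['test/b.js', {failedTests: 0, passedTests: 3}],
]);
[...byFile].filter(statByFile => statByFile[1].failedTests).map(statByFile => statByFile[0]);
//=> ['test/a.js']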

337
node_modules/ava/lib/runner.js generated vendored
View file

@ -1,14 +1,19 @@
'use strict';
const Emittery = require('emittery');
const matcher = require('matcher');
const ContextRef = require('./context-ref');
const createChain = require('./create-chain');
const parseTestArgs = require('./parse-test-args');
const snapshotManager = require('./snapshot-manager');
const serializeError = require('./serialize-error');
const Runnable = require('./test');
import process from 'node:process';
import {pathToFileURL} from 'node:url';
class Runner extends Emittery {
import Emittery from 'emittery';
import {matcher} from 'matcher';
import ContextRef from './context-ref.js';
import createChain from './create-chain.js';
import parseTestArgs from './parse-test-args.js';
import serializeError from './serialize-error.js';
import {load as loadSnapshots, determineSnapshotDir} from './snapshot-manager.js';
import Runnable from './test.js';
import {waitForReady} from './worker/state.cjs';
const makeFileURL = file => file.startsWith('file://') ? file : pathToFileURL(file).toString();
export default class Runner extends Emittery {
constructor(options = {}) {
super();
@ -18,21 +23,18 @@ class Runner extends Emittery {
this.file = options.file;
this.checkSelectedByLineNumbers = options.checkSelectedByLineNumbers;
this.match = options.match || [];
this.powerAssert = undefined; // Assigned later.
this.projectDir = options.projectDir;
this.recordNewSnapshots = options.recordNewSnapshots === true;
this.runOnlyExclusive = options.runOnlyExclusive === true;
this.serial = options.serial === true;
this.skippingTests = false;
this.snapshotDir = options.snapshotDir;
this.updateSnapshots = options.updateSnapshots;
this.activeRunnables = new Set();
this.boundCompareTestSnapshot = this.compareTestSnapshot.bind(this);
this.skippedSnapshots = false;
this.boundSkipSnapshot = this.skipSnapshot.bind(this);
this.interrupted = false;
this.snapshots = null;
this.nextTaskIndex = 0;
this.tasks = {
after: [],
@ -43,9 +45,9 @@ class Runner extends Emittery {
beforeEach: [],
concurrent: [],
serial: [],
todo: []
todo: [],
};
this.waitForReady = [];
this.waitForReady = waitForReady;
const uniqueTestTitles = new Set();
this.registerUniqueTitle = title => {
@ -57,14 +59,21 @@ class Runner extends Emittery {
return true;
};
this.notifyTimeoutUpdate = timeoutMs => {
this.emit('stateChange', {
type: 'test-timeout-configured',
period: timeoutMs,
});
};
let hasStarted = false;
let scheduledStart = false;
const meta = Object.freeze({
file: options.file,
file: makeFileURL(options.file),
get snapshotDirectory() {
const {file, snapshotDir: fixedLocation, projectDir} = options;
return snapshotManager.determineSnapshotDir({file, fixedLocation, projectDir});
}
return makeFileURL(determineSnapshotDir({file, fixedLocation, projectDir}));
},
});
this.chain = createChain((metadata, testArgs) => { // eslint-disable-line complexity
if (hasStarted) {
@ -81,98 +90,96 @@ class Runner extends Emittery {
metadata.taskIndex = this.nextTaskIndex++;
const {args, buildTitle, implementations, rawTitle} = parseTestArgs(testArgs);
const {args, implementation, title} = parseTestArgs(testArgs);
if (this.checkSelectedByLineNumbers) {
metadata.selected = this.checkSelectedByLineNumbers();
}
if (metadata.todo) {
if (implementations.length > 0) {
if (implementation) {
throw new TypeError('`todo` tests are not allowed to have an implementation. Use `test.skip()` for tests with an implementation.');
}
if (!rawTitle) { // Either undefined or a string.
if (!title.raw) { // Either undefined or a string.
throw new TypeError('`todo` tests require a title');
}
if (!this.registerUniqueTitle(rawTitle)) {
throw new Error(`Duplicate test title: ${rawTitle}`);
if (!this.registerUniqueTitle(title.value)) {
throw new Error(`Duplicate test title: ${title.value}`);
}
if (this.match.length > 0) {
// --match selects TODO tests.
if (matcher([rawTitle], this.match).length === 1) {
metadata.exclusive = true;
this.runOnlyExclusive = true;
}
// --match selects TODO tests.
if (this.match.length > 0 && matcher(title.value, this.match).length === 1) {
metadata.exclusive = true;
this.runOnlyExclusive = true;
}
this.tasks.todo.push({title: rawTitle, metadata});
this.tasks.todo.push({title: title.value, metadata});
this.emit('stateChange', {
type: 'declared-test',
title: rawTitle,
title: title.value,
knownFailing: false,
todo: true
todo: true,
});
} else {
if (implementations.length === 0) {
if (!implementation) {
throw new TypeError('Expected an implementation. Use `test.todo()` for tests without an implementation.');
}
for (const implementation of implementations) {
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
if (Array.isArray(implementation)) {
throw new TypeError('AVA 4 no longer supports multiple implementations.');
}
if (isSet && !isValid) {
throw new TypeError('Test & hook titles must be strings');
}
if (isEmpty) {
if (metadata.type === 'test') {
throw new TypeError('Tests must have a title');
} else if (metadata.always) {
title = `${metadata.type}.always hook`;
} else {
title = `${metadata.type} hook`;
}
}
if (metadata.type === 'test' && !this.registerUniqueTitle(title)) {
throw new Error(`Duplicate test title: ${title}`);
}
const task = {
title,
implementation,
args,
metadata: {...metadata}
};
if (title.isSet && !title.isValid) {
throw new TypeError('Test & hook titles must be strings');
}
let fallbackTitle = title.value;
if (title.isEmpty) {
if (metadata.type === 'test') {
if (this.match.length > 0) {
// --match overrides .only()
task.metadata.exclusive = matcher([title], this.match).length === 1;
}
if (task.metadata.skipped) {
this.skippingTests = true;
}
if (task.metadata.exclusive) {
this.runOnlyExclusive = true;
}
this.tasks[metadata.serial ? 'serial' : 'concurrent'].push(task);
this.emit('stateChange', {
type: 'declared-test',
title,
knownFailing: metadata.failing,
todo: false
});
} else if (!metadata.skipped) {
this.tasks[metadata.type + (metadata.always ? 'Always' : '')].push(task);
throw new TypeError('Tests must have a title');
} else if (metadata.always) {
fallbackTitle = `${metadata.type}.always hook`;
} else {
fallbackTitle = `${metadata.type} hook`;
}
}
if (metadata.type === 'test' && !this.registerUniqueTitle(title.value)) {
throw new Error(`Duplicate test title: ${title.value}`);
}
const task = {
title: title.value || fallbackTitle,
implementation,
args,
metadata: {...metadata},
};
if (metadata.type === 'test') {
if (this.match.length > 0) {
// --match overrides .only()
task.metadata.exclusive = matcher(title.value, this.match).length === 1;
}
if (task.metadata.exclusive) {
this.runOnlyExclusive = true;
}
this.tasks[metadata.serial ? 'serial' : 'concurrent'].push(task);
this.snapshots.touch(title.value, metadata.taskIndex);
this.emit('stateChange', {
type: 'declared-test',
title: title.value,
knownFailing: metadata.failing,
todo: false,
});
} else if (!metadata.skipped) {
this.tasks[metadata.type + (metadata.always ? 'Always' : '')].push(task);
}
}
}, {
serial: false,
@ -182,54 +189,43 @@ class Runner extends Emittery {
failing: false,
callback: false,
inline: false, // Set for attempt metadata created by `t.try()`
always: false
always: false,
}, meta);
}
compareTestSnapshot(options) {
if (!this.snapshots) {
this.snapshots = snapshotManager.load({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir,
recordNewSnapshots: this.recordNewSnapshots,
updating: this.updateSnapshots && !this.runOnlyExclusive && !this.skippingTests
});
this.emit('dependency', this.snapshots.snapPath);
get snapshots() {
if (this._snapshots) {
return this._snapshots;
}
// Lazy load not when the runner is instantiated but when snapshots are
// needed. This should be after the test file has been loaded and source
// maps are available.
const snapshots = loadSnapshots({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir,
recordNewSnapshots: this.recordNewSnapshots,
updating: this.updateSnapshots,
});
if (snapshots.snapPath !== undefined) {
this.emit('dependency', snapshots.snapPath);
}
this._snapshots = snapshots;
return snapshots;
}
compareTestSnapshot(options) {
return this.snapshots.compare(options);
}
skipSnapshot() {
this.skippedSnapshots = true;
skipSnapshot(options) {
return this.snapshots.skipSnapshot(options);
}
saveSnapshotState() {
if (
this.updateSnapshots &&
(
this.runOnlyExclusive ||
this.skippingTests ||
this.skippedSnapshots
)
) {
return {cannotSave: true};
}
if (this.snapshots) {
return {touchedFiles: this.snapshots.save()};
}
if (this.updateSnapshots) {
return {touchedFiles: snapshotManager.cleanSnapshots({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir
})};
}
return {};
async saveSnapshotState() {
return {touchedFiles: await this.snapshots.save()};
}
onRun(runnable) {
@ -240,16 +236,6 @@ class Runner extends Emittery {
this.activeRunnables.delete(runnable);
}
attributeLeakedError(err) {
for (const runnable of this.activeRunnables) {
if (runnable.attributeLeakedError(err)) {
return true;
}
}
return false;
}
beforeExitHandler() {
for (const runnable of this.activeRunnables) {
runnable.finishDueToInactivity();
@ -269,25 +255,25 @@ class Runner extends Emittery {
};
let waitForSerial = Promise.resolve();
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-reduce
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-array-reduce
if (runnable.metadata.serial || this.serial) {
waitForSerial = previous.then(() => {
waitForSerial = previous.then(() =>
// Serial runnables run as long as there was no previous failure, unless
// the runnable should always be run.
return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
});
(allPassed || runnable.metadata.always) && runAndStoreResult(runnable),
);
return waitForSerial;
}
return Promise.all([
previous,
waitForSerial.then(() => {
waitForSerial.then(() =>
// Concurrent runnables are kicked off after the previous serial
// runnables have completed, as long as there was no previous failure
// (or if the runnable should always be run). One concurrent runnable's
// failure does not prevent the next runnable from running.
return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
})
(allPassed || runnable.metadata.always) && runAndStoreResult(runnable),
),
]);
}, waitForSerial);
@ -303,22 +289,22 @@ class Runner extends Emittery {
return result;
}
async runHooks(tasks, contextRef, {titleSuffix, testPassed, associatedTaskIndex} = {}) {
async runHooks(tasks, contextRef, {titleSuffix, testPassed} = {}) {
const hooks = tasks.map(task => new Runnable({
contextRef,
experiments: this.experiments,
failWithoutAssertions: false,
fn: task.args.length === 0 ?
task.implementation :
t => task.implementation.apply(null, [t].concat(task.args)),
fn: task.args.length === 0
? task.implementation
: t => Reflect.apply(task.implementation, null, [t, ...task.args]),
compareTestSnapshot: this.boundCompareTestSnapshot,
skipSnapshot: this.boundSkipSnapshot,
updateSnapshots: this.updateSnapshots,
metadata: {...task.metadata, associatedTaskIndex},
powerAssert: this.powerAssert,
metadata: task.metadata,
title: `${task.title}${titleSuffix || ''}`,
isHook: true,
testPassed
testPassed,
notifyTimeoutUpdate: this.notifyTimeoutUpdate,
}));
const outcome = await this.runMultiple(hooks, this.serial);
for (const result of outcome.storedResults) {
@ -327,7 +313,7 @@ class Runner extends Emittery {
type: 'hook-finished',
title: result.title,
duration: result.duration,
logs: result.logs
logs: result.logs,
});
} else {
this.emit('stateChange', {
@ -335,7 +321,7 @@ class Runner extends Emittery {
title: result.title,
err: serializeError('Hook failure', true, result.error),
duration: result.duration,
logs: result.logs
logs: result.logs,
});
}
}
@ -350,8 +336,7 @@ class Runner extends Emittery {
contextRef,
{
titleSuffix: hookSuffix,
associatedTaskIndex: task.metadata.taskIndex
}
},
);
let testOk = false;
@ -361,16 +346,16 @@ class Runner extends Emittery {
contextRef,
experiments: this.experiments,
failWithoutAssertions: this.failWithoutAssertions,
fn: task.args.length === 0 ?
task.implementation :
t => task.implementation.apply(null, [t].concat(task.args)),
fn: task.args.length === 0
? task.implementation
: t => Reflect.apply(task.implementation, null, [t, ...task.args]),
compareTestSnapshot: this.boundCompareTestSnapshot,
skipSnapshot: this.boundSkipSnapshot,
updateSnapshots: this.updateSnapshots,
metadata: task.metadata,
powerAssert: this.powerAssert,
title: task.title,
registerUniqueTitle: this.registerUniqueTitle
registerUniqueTitle: this.registerUniqueTitle,
notifyTimeoutUpdate: this.notifyTimeoutUpdate,
});
const result = await this.runSingle(test);
@ -382,7 +367,7 @@ class Runner extends Emittery {
title: result.title,
duration: result.duration,
knownFailing: result.metadata.failing,
logs: result.logs
logs: result.logs,
});
hooksOk = await this.runHooks(
@ -391,7 +376,6 @@ class Runner extends Emittery {
{
titleSuffix: hookSuffix,
testPassed: testOk,
associatedTaskIndex: task.metadata.taskIndex
});
} else {
this.emit('stateChange', {
@ -400,7 +384,7 @@ class Runner extends Emittery {
err: serializeError('Test failure', true, result.error, this.file),
duration: result.duration,
knownFailing: result.metadata.failing,
logs: result.logs
logs: result.logs,
});
// Don't run `afterEach` hooks if the test failed.
}
@ -412,20 +396,21 @@ class Runner extends Emittery {
{
titleSuffix: hookSuffix,
testPassed: testOk,
associatedTaskIndex: task.metadata.taskIndex
});
return alwaysOk && hooksOk && testOk;
}
async start() {
async start() { // eslint-disable-line complexity
const concurrentTests = [];
const serialTests = [];
for (const task of this.tasks.serial) {
if (this.runOnlyExclusive && !task.metadata.exclusive) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
@ -434,20 +419,24 @@ class Runner extends Emittery {
title: task.title,
knownFailing: task.metadata.failing,
skip: task.metadata.skipped,
todo: false
todo: false,
});
if (!task.metadata.skipped) {
if (task.metadata.skipped) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
} else {
serialTests.push(task);
}
}
for (const task of this.tasks.concurrent) {
if (this.runOnlyExclusive && !task.metadata.exclusive) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
@ -456,15 +445,15 @@ class Runner extends Emittery {
title: task.title,
knownFailing: task.metadata.failing,
skip: task.metadata.skipped,
todo: false
todo: false,
});
if (!task.metadata.skipped) {
if (this.serial) {
serialTests.push(task);
} else {
concurrentTests.push(task);
}
if (task.metadata.skipped) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
} else if (this.serial) {
serialTests.push(task);
} else {
concurrentTests.push(task);
}
}
@ -482,7 +471,7 @@ class Runner extends Emittery {
title: task.title,
knownFailing: false,
skip: false,
todo: true
todo: true,
});
}
@ -498,13 +487,13 @@ class Runner extends Emittery {
// Note that the hooks and tests always begin running asynchronously.
const beforePromise = this.runHooks(this.tasks.before, contextRef);
const serialPromise = beforePromise.then(beforeHooksOk => { // eslint-disable-line promise/prefer-await-to-then
const serialPromise = beforePromise.then(beforeHooksOk => {
// Don't run tests if a `before` hook failed.
if (!beforeHooksOk) {
return false;
}
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-reduce
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-array-reduce
const previousOk = await previous;
// Don't start tests after an interrupt.
if (this.interrupted) {
@ -520,7 +509,7 @@ class Runner extends Emittery {
return this.runTest(task, contextRef.copy());
}, true);
});
const concurrentPromise = Promise.all([beforePromise, serialPromise]).then(async ([beforeHooksOk, serialOk]) => { // eslint-disable-line promise/prefer-await-to-then
const concurrentPromise = Promise.all([beforePromise, serialPromise]).then(async ([beforeHooksOk, serialOk]) => {
// Don't run tests if a `before` hook failed, or if `failFast` is enabled
// and a previous serial test failed.
if (!beforeHooksOk || (!serialOk && this.failFast)) {
@ -534,9 +523,7 @@ class Runner extends Emittery {
// If a concurrent test fails, even if `failFast` is enabled it won't
// stop other concurrent tests from running.
const allOkays = await Promise.all(concurrentTests.map(task => {
return this.runTest(task, contextRef.copy());
}));
const allOkays = await Promise.all(concurrentTests.map(task => this.runTest(task, contextRef.copy())));
return allOkays.every(ok => ok);
});
@ -563,5 +550,3 @@ class Runner extends Emittery {
this.interrupted = true;
}
}
module.exports = Runner;

53
node_modules/ava/lib/scheduler.js generated vendored Normal file
View file

@ -0,0 +1,53 @@
import fs from 'node:fs';
import path from 'node:path';
import writeFileAtomic from 'write-file-atomic';
import isCi from './is-ci.js';
const FILENAME = 'failing-tests.json';
const scheduler = {
storeFailedTestFiles(runStatus, cacheDir) {
if (isCi || !cacheDir) {
return;
}
try {
writeFileAtomic.sync(path.join(cacheDir, FILENAME), JSON.stringify(runStatus.getFailedTestFiles()));
} catch {}
},
// Order test-files, so that files with failing tests come first
failingTestsFirst(selectedFiles, cacheDir, cacheEnabled) {
if (isCi || cacheEnabled === false) {
return selectedFiles;
}
const filePath = path.join(cacheDir, FILENAME);
let failedTestFiles;
try {
failedTestFiles = JSON.parse(fs.readFileSync(filePath));
} catch {
return selectedFiles;
}
return [...selectedFiles].sort((f, s) => {
if (failedTestFiles.includes(f) && failedTestFiles.includes(s)) {
return 0;
}
if (failedTestFiles.includes(f)) {
return -1;
}
if (failedTestFiles.includes(s)) {
return 1;
}
return 0;
});
},
};
export default scheduler;
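Hypothetical wiring of the new scheduler (cache directory and file list invented): failures from the previous run are persisted, then used to front-load failing files on the next run.
import scheduler from './scheduler.js';
const cacheDir = 'node_modules/.cache/ava'; // Assumed location; AVA resolves this itself
scheduler.storeFailedTestFiles(runStatus, cacheDir); // runStatus: a RunStatus instance; writes failing-tests.json (skipped on CI)
const ordered = scheduler.failingTestsFirst(['test/a.js', 'test/b.js'], cacheDir, true);
// Files listed in failing-tests.json sort first; the relative order of the rest is preserved.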

View file

@ -1,42 +1,45 @@
'use strict';
const path = require('path');
const cleanYamlObject = require('clean-yaml-object');
const concordance = require('concordance');
const isError = require('is-error');
const slash = require('slash');
const StackUtils = require('stack-utils');
const assert = require('./assert');
const concordanceOptions = require('./concordance-options').default;
import path from 'node:path';
import process from 'node:process';
import {fileURLToPath, pathToFileURL} from 'node:url';
import cleanYamlObject from 'clean-yaml-object';
import concordance from 'concordance';
import isError from 'is-error';
import StackUtils from 'stack-utils';
import {AssertionError} from './assert.js';
import concordanceOptions from './concordance-options.js';
function isAvaAssertionError(source) {
return source instanceof assert.AssertionError;
return source instanceof AssertionError;
}
function filter(propertyName, isRoot) {
return !isRoot || (propertyName !== 'message' && propertyName !== 'name' && propertyName !== 'stack');
}
function normalizeFile(file, ...base) {
return file.startsWith('file://') ? file : pathToFileURL(path.resolve(...base, file)).toString();
}
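Usage sketch of the helper above: the test file and each parsed stack-frame path are both coerced to file:// URLs, so they compare equal regardless of how they were recorded (paths invented):
normalizeFile('test/foo.js', '/home/user/project'); //=> 'file:///home/user/project/test/foo.js'
normalizeFile('file:///home/user/project/test/foo.js'); //=> returned unchanged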
const stackUtils = new StackUtils();
function extractSource(stack, testFile) {
if (!stack || !testFile) {
return null;
}
// Normalize the test file so it matches `callSite.file`.
const relFile = path.relative(process.cwd(), testFile);
const normalizedFile = process.platform === 'win32' ? slash(relFile) : relFile;
testFile = normalizeFile(testFile);
for (const line of stack.split('\n')) {
try {
const callSite = stackUtils.parseLine(line);
if (callSite.file === normalizedFile) {
return {
isDependency: false,
isWithinProject: true,
file: path.resolve(process.cwd(), callSite.file),
line: callSite.line
};
}
} catch {}
const callSite = stackUtils.parseLine(line);
if (callSite && normalizeFile(callSite.file) === testFile) {
return {
isDependency: false,
isWithinProject: true,
file: testFile,
line: callSite.line,
};
}
}
return null;
@ -52,8 +55,8 @@ function buildSource(source) {
// directory set to the project directory.
const projectDir = process.cwd();
const file = path.resolve(projectDir, source.file.trim());
const rel = path.relative(projectDir, file);
const file = normalizeFile(source.file.trim(), projectDir);
const rel = path.relative(projectDir, fileURLToPath(file));
const [segment] = rel.split(path.sep);
const isWithinProject = segment !== '..' && (process.platform !== 'win32' || !segment.includes(':'));
@ -63,60 +66,59 @@ function buildSource(source) {
isDependency,
isWithinProject,
file,
line: source.line
line: source.line,
};
}
function trySerializeError(err, shouldBeautifyStack, testFile) {
const stack = err.savedError ? err.savedError.stack : err.stack;
function trySerializeError(error, shouldBeautifyStack, testFile) {
const stack = error.savedError ? error.savedError.stack : error.stack;
const retval = {
avaAssertionError: isAvaAssertionError(err),
avaAssertionError: isAvaAssertionError(error),
nonErrorObject: false,
source: extractSource(stack, testFile),
stack,
shouldBeautifyStack
shouldBeautifyStack,
};
if (err.actualStack) {
retval.stack = err.actualStack;
if (error.actualStack) {
retval.stack = error.actualStack;
}
if (retval.avaAssertionError) {
retval.improperUsage = err.improperUsage;
retval.message = err.message;
retval.name = err.name;
retval.statements = err.statements;
retval.values = err.values;
retval.improperUsage = error.improperUsage;
retval.message = error.message;
retval.name = error.name;
retval.values = error.values;
if (err.fixedSource) {
const source = buildSource(err.fixedSource);
if (error.fixedSource) {
const source = buildSource(error.fixedSource);
if (source) {
retval.source = source;
}
}
if (err.assertion) {
retval.assertion = err.assertion;
if (error.assertion) {
retval.assertion = error.assertion;
}
if (err.operator) {
retval.operator = err.operator;
if (error.operator) {
retval.operator = error.operator;
}
} else {
retval.object = cleanYamlObject(err, filter); // Cleanly copy non-standard properties
if (typeof err.message === 'string') {
retval.message = err.message;
retval.object = cleanYamlObject(error, filter); // Cleanly copy non-standard properties
if (typeof error.message === 'string') {
retval.message = error.message;
}
if (typeof err.name === 'string') {
retval.name = err.name;
if (typeof error.name === 'string') {
retval.name = error.name;
}
}
if (typeof err.stack === 'string') {
const lines = err.stack.split('\n');
if (err.name === 'SyntaxError' && !lines[0].startsWith('SyntaxError')) {
if (typeof error.stack === 'string') {
const lines = error.stack.split('\n');
if (error.name === 'SyntaxError' && !lines[0].startsWith('SyntaxError')) {
retval.summary = '';
for (const line of lines) {
retval.summary += line + '\n';
@ -127,11 +129,8 @@ function trySerializeError(err, shouldBeautifyStack, testFile) {
retval.summary = retval.summary.trim();
} else {
// Skip the source line header inserted by `esm`:
// <https://github.com/standard-things/esm/wiki/improved-errors>
const start = lines.findIndex(line => !/:\d+$/.test(line));
retval.summary = '';
for (let index = start; index < lines.length; index++) {
for (let index = 0; index < lines.length; index++) {
if (lines[index].startsWith(' at')) {
break;
}
@ -146,17 +145,17 @@ function trySerializeError(err, shouldBeautifyStack, testFile) {
return retval;
}
function serializeError(origin, shouldBeautifyStack, err, testFile) {
if (!isError(err)) {
export default function serializeError(origin, shouldBeautifyStack, error, testFile) {
if (!isError(error)) {
return {
avaAssertionError: false,
nonErrorObject: true,
formatted: concordance.formatDescriptor(concordance.describe(err, concordanceOptions), concordanceOptions)
formatted: concordance.formatDescriptor(concordance.describe(error, concordanceOptions), concordanceOptions),
};
}
try {
return trySerializeError(err, shouldBeautifyStack, testFile);
return trySerializeError(error, shouldBeautifyStack, testFile);
} catch {
const replacement = new Error(`${origin}: Could not serialize error`);
return {
@ -165,9 +164,7 @@ function serializeError(origin, shouldBeautifyStack, err, testFile) {
name: replacement.name,
message: replacement.message,
stack: replacement.stack,
summary: replacement.message
summary: replacement.message,
};
}
}
module.exports = serializeError;

View file

@ -1,24 +1,24 @@
'use strict';
import {Buffer} from 'node:buffer';
import crypto from 'node:crypto';
import fs from 'node:fs';
import {findSourceMap} from 'node:module';
import path from 'node:path';
import {fileURLToPath} from 'node:url';
import zlib from 'node:zlib';
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
import cbor from 'cbor';
import concordance from 'concordance';
import indentString from 'indent-string';
import mem from 'mem';
import slash from 'slash';
import writeFileAtomic from 'write-file-atomic';
const concordance = require('concordance');
const indentString = require('indent-string');
const md5Hex = require('md5-hex');
const convertSourceMap = require('convert-source-map');
const slash = require('slash');
const writeFileAtomic = require('write-file-atomic');
const mem = require('mem');
const concordanceOptions = require('./concordance-options').snapshotManager;
import {snapshotManager as concordanceOptions} from './concordance-options.js';
// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
// The version is encoded as an unsigned 16 bit integer.
const VERSION = 2;
const VERSION = 3;
const VERSION_HEADER = Buffer.alloc(2);
VERSION_HEADER.writeUInt16LE(VERSION);
@ -28,26 +28,24 @@ const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');
const MD5_HASH_LENGTH = 16;
const SHA_256_HASH_LENGTH = 32;
class SnapshotError extends Error {
export class SnapshotError extends Error {
constructor(message, snapPath) {
super(message);
this.name = 'SnapshotError';
this.snapPath = snapPath;
}
}
exports.SnapshotError = SnapshotError;
class ChecksumError extends SnapshotError {
export class ChecksumError extends SnapshotError {
constructor(snapPath) {
super('Checksum mismatch', snapPath);
this.name = 'ChecksumError';
}
}
exports.ChecksumError = ChecksumError;
class VersionMismatchError extends SnapshotError {
export class VersionMismatchError extends SnapshotError {
constructor(snapPath, version) {
super('Unexpected snapshot version', snapPath);
this.name = 'VersionMismatchError';
@ -55,20 +53,25 @@ class VersionMismatchError extends SnapshotError {
this.expectedVersion = VERSION;
}
}
exports.VersionMismatchError = VersionMismatchError;
export class InvalidSnapshotError extends SnapshotError {
constructor(snapPath) {
super('Invalid snapshot file', snapPath);
this.name = 'InvalidSnapshotError';
}
}
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');
function isLegacySnapshot(buffer) {
return LEGACY_SNAPSHOT_HEADER.equals(buffer.slice(0, LEGACY_SNAPSHOT_HEADER.byteLength));
}
class LegacyError extends SnapshotError {
export class LegacyError extends SnapshotError {
constructor(snapPath) {
super('Legacy snapshot file', snapPath);
this.name = 'LegacyError';
}
}
exports.LegacyError = LegacyError;
function tryRead(file) {
try {
@ -82,168 +85,117 @@ function tryRead(file) {
}
}
function withoutLineEndings(buffer) {
let checkPosition = buffer.byteLength - 1;
while (buffer[checkPosition] === 0x0A || buffer[checkPosition] === 0x0D) {
checkPosition--;
}
function formatEntry(snapshot, index) {
const {
data,
label = `Snapshot ${index + 1}`, // Human-readable labels start counting at 1.
} = snapshot;
return buffer.slice(0, checkPosition + 1);
const description = data
? concordance.formatDescriptor(concordance.deserialize(data), concordanceOptions)
: '<No Data>';
const blockquote = label.split(/\n/).map(line => '> ' + line).join('\n');
return `${blockquote}\n\n${indentString(description, 4)}`;
}
function formatEntry(label, descriptor) {
if (label) {
label = `> ${label}\n\n`;
}
function combineEntries({blocks}) {
const combined = new BufferBuilder();
const codeBlock = indentString(concordance.formatDescriptor(descriptor, concordanceOptions), 4);
return Buffer.from(label + codeBlock, 'utf8');
}
for (const {title, snapshots} of blocks) {
const last = snapshots[snapshots.length - 1];
combined.write(`\n\n## ${title}\n\n`);
function combineEntries(entries) {
const buffers = [];
let byteLength = 0;
for (const [index, snapshot] of snapshots.entries()) {
combined.write(formatEntry(snapshot, index));
const sortedKeys = [...entries.keys()].sort((keyA, keyB) => {
const [a, b] = [entries.get(keyA), entries.get(keyB)];
const taskDifference = a.taskIndex - b.taskIndex;
if (taskDifference !== 0) {
return taskDifference;
}
const [assocA, assocB] = [a.associatedTaskIndex, b.associatedTaskIndex];
if (assocA !== undefined && assocB !== undefined) {
const assocDifference = assocA - assocB;
if (assocDifference !== 0) {
return assocDifference;
}
}
return a.snapIndex - b.snapIndex;
});
for (const key of sortedKeys) {
const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
buffers.push(keyBuffer);
byteLength += keyBuffer.byteLength;
const formattedEntries = entries.get(key).buffers;
const last = formattedEntries[formattedEntries.length - 1];
for (const entry of formattedEntries) {
buffers.push(entry);
byteLength += entry.byteLength;
if (entry !== last) {
buffers.push(REPORT_SEPARATOR);
byteLength += REPORT_SEPARATOR.byteLength;
if (snapshot !== last) {
combined.write(REPORT_SEPARATOR);
}
}
}
return {buffers, byteLength};
return combined;
}
function generateReport(relFile, snapFile, entries) {
const combined = combineEntries(entries);
const {buffers} = combined;
let {byteLength} = combined;
const header = Buffer.from(`# Snapshot report for \`${slash(relFile)}\`
function generateReport(relFile, snapFile, snapshots) {
return new BufferBuilder()
.write(`# Snapshot report for \`${slash(relFile)}\`
The actual snapshot is saved in \`${snapFile}\`.
Generated by [AVA](https://avajs.dev).`, 'utf8');
buffers.unshift(header);
byteLength += header.byteLength;
buffers.push(REPORT_TRAILING_NEWLINE);
byteLength += REPORT_TRAILING_NEWLINE.byteLength;
return Buffer.concat(buffers, byteLength);
Generated by [AVA](https://avajs.dev).`)
.append(combineEntries(snapshots))
.write(REPORT_TRAILING_NEWLINE)
.toBuffer();
}
function appendReportEntries(existingReport, entries) {
const combined = combineEntries(entries);
const {buffers} = combined;
let {byteLength} = combined;
class BufferBuilder {
constructor() {
this.buffers = [];
this.byteOffset = 0;
}
const prepend = withoutLineEndings(existingReport);
buffers.unshift(prepend);
byteLength += prepend.byteLength;
append(builder) {
this.buffers.push(...builder.buffers);
this.byteOffset += builder.byteOffset;
return this;
}
buffers.push(REPORT_TRAILING_NEWLINE);
byteLength += REPORT_TRAILING_NEWLINE.byteLength;
return Buffer.concat(buffers, byteLength);
}
function encodeSnapshots(buffersByHash) {
const buffers = [];
let byteOffset = 0;
// Entry start and end pointers are relative to the header length. This means
// it's possible to append new entries to an existing snapshot file, without
// having to rewrite pointers for existing entries.
const headerLength = Buffer.alloc(4);
buffers.push(headerLength);
byteOffset += 4;
// Allows 65535 hashes (tests or identified snapshots) per file.
const numberHashes = Buffer.alloc(2);
numberHashes.writeUInt16LE(buffersByHash.size);
buffers.push(numberHashes);
byteOffset += 2;
const entries = [];
// Maps can't have duplicate keys, so all items in [...buffersByHash.keys()]
// are unique, so sortedHashes should be deterministic.
const sortedHashes = [...buffersByHash.keys()].sort();
const sortedBuffersByHash = [...sortedHashes.map(hash => [hash, buffersByHash.get(hash)])];
for (const [hash, snapshotBuffers] of sortedBuffersByHash) {
buffers.push(Buffer.from(hash, 'hex'));
byteOffset += MD5_HASH_LENGTH;
// Allows 65535 snapshots per hash.
const numberSnapshots = Buffer.alloc(2);
numberSnapshots.writeUInt16LE(snapshotBuffers.length, 0);
buffers.push(numberSnapshots);
byteOffset += 2;
for (const value of snapshotBuffers) {
// Each pointer is 32 bits, restricting the total, uncompressed buffer to
// 4 GiB.
const start = Buffer.alloc(4);
const end = Buffer.alloc(4);
entries.push({start, end, value});
buffers.push(start, end);
byteOffset += 8;
write(data) {
if (typeof data === 'string') {
this.write(Buffer.from(data, 'utf8'));
} else {
this.buffers.push(data);
this.byteOffset += data.byteLength;
}
return this;
}
headerLength.writeUInt32LE(byteOffset, 0);
let bodyOffset = 0;
for (const entry of entries) {
const start = bodyOffset;
const end = bodyOffset + entry.value.byteLength;
entry.start.writeUInt32LE(start, 0);
entry.end.writeUInt32LE(end, 0);
buffers.push(entry.value);
bodyOffset = end;
toBuffer() {
return Buffer.concat(this.buffers, this.byteOffset);
}
}
byteOffset += bodyOffset;
function sortBlocks(blocksByTitle, blockIndices) {
return [...blocksByTitle].sort(
([aTitle], [bTitle]) => {
const a = blockIndices.get(aTitle);
const b = blockIndices.get(bTitle);
const compressed = zlib.gzipSync(Buffer.concat(buffers, byteOffset));
if (a === undefined) {
if (b === undefined) {
return 0;
}
return 1;
}
if (b === undefined) {
return -1;
}
return a - b;
},
);
}
async function encodeSnapshots(snapshotData) {
const encoded = await cbor.encodeAsync(snapshotData, {
omitUndefinedProperties: true,
canonical: true,
});
const compressed = zlib.gzipSync(encoded);
compressed[9] = 0x03; // Override the GZip header containing the OS to always be Linux
const md5sum = crypto.createHash('md5').update(compressed).digest();
const sha256sum = crypto.createHash('sha256').update(compressed).digest();
return Buffer.concat([
READABLE_PREFIX,
VERSION_HEADER,
md5sum,
compressed
], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + MD5_HASH_LENGTH + compressed.byteLength);
sha256sum,
compressed,
], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + SHA_256_HASH_LENGTH + compressed.byteLength);
}
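
The new on-disk format is canonical CBOR, gzipped, then framed by a human-readable prefix, a 16-bit version and a SHA-256 checksum of the compressed payload. A minimal sketch of the same container layout, using invented prefix and version constants rather than AVA's real ones:

import crypto from 'node:crypto';
import zlib from 'node:zlib';
import cbor from 'cbor';

// Invented constants for illustration; AVA defines its own elsewhere in this file.
const READABLE_PREFIX = Buffer.from('Example snapshot container\n', 'utf8');
const VERSION = 3;
const SHA_256_HASH_LENGTH = 32;

async function encode(data) {
	// Canonical CBOR so identical data always yields identical bytes.
	const encoded = await cbor.encodeAsync(data, {canonical: true, omitUndefinedProperties: true});
	const compressed = zlib.gzipSync(encoded);
	compressed[9] = 0x03; // Pin the gzip OS byte, as the code above does, so output is reproducible.
	const version = Buffer.alloc(2);
	version.writeUInt16LE(VERSION);
	const sha256sum = crypto.createHash('sha256').update(compressed).digest();
	return Buffer.concat([READABLE_PREFIX, version, sha256sum, compressed]);
}

function decode(buffer) {
	const versionOffset = buffer.indexOf(0x0A) + 1; // The readable prefix ends at the first newline.
	if (buffer.readUInt16LE(versionOffset) !== VERSION) {
		throw new Error('Unexpected snapshot version');
	}
	const sumOffset = versionOffset + 2;
	const compressed = buffer.slice(sumOffset + SHA_256_HASH_LENGTH);
	const expectedSum = buffer.slice(sumOffset, sumOffset + SHA_256_HASH_LENGTH);
	if (!crypto.createHash('sha256').update(compressed).digest().equals(expectedSum)) {
		throw new Error('Checksum mismatch');
	}
	return cbor.decode(zlib.gunzipSync(compressed));
}

console.log(decode(await encode({blocks: [{title: 'example', snapshots: []}]})));
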
function decodeSnapshots(buffer, snapPath) {
@@ -253,182 +205,207 @@ function decodeSnapshots(buffer, snapPath) {
// The version starts after the readable prefix, which is ended by a newline
// byte (0x0A).
const versionOffset = buffer.indexOf(0x0A) + 1;
const newline = buffer.indexOf(0x0A);
if (newline === -1) {
throw new InvalidSnapshotError(snapPath);
}
const versionOffset = newline + 1;
const version = buffer.readUInt16LE(versionOffset);
if (version !== VERSION) {
throw new VersionMismatchError(snapPath, version);
}
const md5sumOffset = versionOffset + 2;
const compressedOffset = md5sumOffset + MD5_HASH_LENGTH;
const sha256sumOffset = versionOffset + 2;
const compressedOffset = sha256sumOffset + SHA_256_HASH_LENGTH;
const compressed = buffer.slice(compressedOffset);
const md5sum = crypto.createHash('md5').update(compressed).digest();
const expectedSum = buffer.slice(md5sumOffset, compressedOffset);
if (!md5sum.equals(expectedSum)) {
const sha256sum = crypto.createHash('sha256').update(compressed).digest();
const expectedSum = buffer.slice(sha256sumOffset, compressedOffset);
if (!sha256sum.equals(expectedSum)) {
throw new ChecksumError(snapPath);
}
const decompressed = zlib.gunzipSync(compressed);
let byteOffset = 0;
const headerLength = decompressed.readUInt32LE(byteOffset);
byteOffset += 4;
const snapshotsByHash = new Map();
const numberHashes = decompressed.readUInt16LE(byteOffset);
byteOffset += 2;
for (let count = 0; count < numberHashes; count++) {
const hash = decompressed.toString('hex', byteOffset, byteOffset + MD5_HASH_LENGTH);
byteOffset += MD5_HASH_LENGTH;
const numberSnapshots = decompressed.readUInt16LE(byteOffset);
byteOffset += 2;
const snapshotsBuffers = new Array(numberSnapshots);
for (let index = 0; index < numberSnapshots; index++) {
const start = decompressed.readUInt32LE(byteOffset) + headerLength;
byteOffset += 4;
const end = decompressed.readUInt32LE(byteOffset) + headerLength;
byteOffset += 4;
snapshotsBuffers[index] = decompressed.slice(start, end);
}
// Allow for new entries to be appended to an existing header, which could
// lead to the same hash being present multiple times.
if (snapshotsByHash.has(hash)) {
snapshotsByHash.set(hash, snapshotsByHash.get(hash).concat(snapshotsBuffers));
} else {
snapshotsByHash.set(hash, snapshotsBuffers);
}
}
return snapshotsByHash;
return cbor.decode(decompressed);
}
class Manager {
constructor(options) {
this.appendOnly = options.appendOnly;
this.dir = options.dir;
this.recordNewSnapshots = options.recordNewSnapshots;
this.updating = options.updating;
this.relFile = options.relFile;
this.reportFile = options.reportFile;
this.reportPath = options.reportPath;
this.snapFile = options.snapFile;
this.snapPath = options.snapPath;
this.snapshotsByHash = options.snapshotsByHash;
this.oldBlocksByTitle = options.oldBlocksByTitle;
this.newBlocksByTitle = options.newBlocksByTitle;
this.blockIndices = new Map();
this.error = options.error;
this.hasChanges = false;
this.reportEntries = new Map();
}
touch(title, taskIndex) {
this.blockIndices.set(title, taskIndex);
}
compare(options) {
const hash = md5Hex(options.belongsTo);
const entries = this.snapshotsByHash.get(hash) || [];
const snapshotBuffer = entries[options.index];
if (this.error) {
throw this.error;
}
if (!snapshotBuffer) {
const block = this.newBlocksByTitle.get(options.belongsTo);
const snapshot = block && block.snapshots[options.index];
const data = snapshot && snapshot.data;
if (!data) {
if (!this.recordNewSnapshots) {
return {pass: false};
}
if (options.deferRecording) {
const record = this.deferRecord(hash, options);
const record = this.deferRecord(options);
return {pass: true, record};
}
this.record(hash, options);
this.record(options);
return {pass: true};
}
const actual = concordance.deserialize(snapshotBuffer, concordanceOptions);
const actual = concordance.deserialize(data, concordanceOptions);
const expected = concordance.describe(options.expected, concordanceOptions);
const pass = concordance.compareDescriptors(actual, expected);
return {actual, expected, pass};
}
deferRecord(hash, options) {
const descriptor = concordance.describe(options.expected, concordanceOptions);
const snapshot = concordance.serialize(descriptor);
const entry = formatEntry(options.label, descriptor);
const {taskIndex, snapIndex, associatedTaskIndex} = options;
recordSerialized({data, label, belongsTo, index}) {
let block = this.newBlocksByTitle.get(belongsTo);
if (!block) {
block = {snapshots: []};
}
const {snapshots} = block;
if (index > snapshots.length) {
throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, exceeds expected index of ${snapshots.length}`);
} else if (index < snapshots.length) {
if (snapshots[index].data) {
throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, already exists`);
}
snapshots[index] = {data, label};
} else {
snapshots.push({data, label});
}
this.newBlocksByTitle.set(belongsTo, block);
}
deferRecord(options) {
const {expected, belongsTo, label, index} = options;
const descriptor = concordance.describe(expected, concordanceOptions);
const data = concordance.serialize(descriptor);
return () => { // Must be called in order!
this.hasChanges = true;
let snapshots = this.snapshotsByHash.get(hash);
if (!snapshots) {
snapshots = [];
this.snapshotsByHash.set(hash, snapshots);
}
if (options.index > snapshots.length) {
throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, exceeds expected index of ${snapshots.length}`);
}
if (options.index < snapshots.length) {
throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, already exists`);
}
snapshots.push(snapshot);
if (this.reportEntries.has(options.belongsTo)) {
this.reportEntries.get(options.belongsTo).buffers.push(entry);
} else {
this.reportEntries.set(options.belongsTo, {buffers: [entry], taskIndex, snapIndex, associatedTaskIndex});
}
this.recordSerialized({data, label, belongsTo, index});
};
}
record(hash, options) {
const record = this.deferRecord(hash, options);
record(options) {
const record = this.deferRecord(options);
record();
}
save() {
skipBlock(title) {
const block = this.oldBlocksByTitle.get(title);
if (block) {
this.newBlocksByTitle.set(title, block);
}
}
skipSnapshot({belongsTo, index, deferRecording}) {
const oldBlock = this.oldBlocksByTitle.get(belongsTo);
let snapshot = oldBlock && oldBlock.snapshots[index];
if (!snapshot) {
snapshot = {};
}
// Retain the label from the old snapshot, so as not to assume that the
// snapshot.skip() arguments are well-formed.
// Defer recording if called in a try().
if (deferRecording) {
return () => { // Must be called in order!
this.recordSerialized({belongsTo, index, ...snapshot});
};
}
this.recordSerialized({belongsTo, index, ...snapshot});
}
async save() {
const {dir, relFile, snapFile, snapPath, reportPath} = this;
if (this.updating && this.newBlocksByTitle.size === 0) {
return {
changedFiles: [cleanFile(snapPath), cleanFile(reportPath)].flat(),
temporaryFiles: [],
};
}
if (!this.hasChanges) {
return null;
}
const {snapPath} = this;
const buffer = encodeSnapshots(this.snapshotsByHash);
const snapshots = {
blocks: sortBlocks(this.newBlocksByTitle, this.blockIndices).map(
([title, block]) => ({title, ...block}),
),
};
const reportPath = path.join(this.dir, this.reportFile);
const existingReport = this.appendOnly ? tryRead(reportPath) : null;
const reportBuffer = existingReport ?
appendReportEntries(existingReport, this.reportEntries) :
generateReport(this.relFile, this.snapFile, this.reportEntries);
const buffer = await encodeSnapshots(snapshots);
const reportBuffer = generateReport(relFile, snapFile, snapshots);
fs.mkdirSync(this.dir, {recursive: true});
await fs.promises.mkdir(dir, {recursive: true});
const paths = [snapPath, reportPath];
const tmpfileCreated = tmpfile => paths.push(tmpfile);
writeFileAtomic.sync(snapPath, buffer, {tmpfileCreated});
writeFileAtomic.sync(reportPath, reportBuffer, {tmpfileCreated});
return paths;
const temporaryFiles = [];
const tmpfileCreated = file => temporaryFiles.push(file);
await Promise.all([
writeFileAtomic(snapPath, buffer, {tmpfileCreated}),
writeFileAtomic(reportPath, reportBuffer, {tmpfileCreated}),
]);
return {
changedFiles: [snapPath, reportPath],
temporaryFiles,
};
}
}
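
Note the shape save() now returns: the changed paths for the watcher to treat as touched, plus any temporary files write-file-atomic created along the way. The same pattern in isolation (paths are placeholders):

import writeFileAtomic from 'write-file-atomic';

async function writeSnapshotFiles(snapPath, snapBuffer, reportPath, reportBuffer) {
	const temporaryFiles = [];
	const tmpfileCreated = file => temporaryFiles.push(file); // write-file-atomic reports the temp path it used.
	await Promise.all([
		writeFileAtomic(snapPath, snapBuffer, {tmpfileCreated}),
		writeFileAtomic(reportPath, reportBuffer, {tmpfileCreated}),
	]);
	return {changedFiles: [snapPath, reportPath], temporaryFiles};
}

// Hypothetical usage:
// const saved = await writeSnapshotFiles('example.test.js.snap', snapBuffer, 'example.test.js.md', reportBuffer);
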
const resolveSourceFile = mem(file => {
const testDir = path.dirname(file);
const buffer = tryRead(file);
if (!buffer) {
return file; // Assume the file is stubbed in our test suite.
const sourceMap = findSourceMap(file);
if (sourceMap === undefined) {
return file;
}
const source = buffer.toString();
const converter = convertSourceMap.fromSource(source) || convertSourceMap.fromMapFileSource(source, testDir);
if (converter) {
const map = converter.toObject();
const firstSource = `${map.sourceRoot || ''}${map.sources[0]}`;
return path.resolve(testDir, firstSource);
const {payload} = sourceMap;
if (payload.sources.length === 0) { // Hypothetical?
return file;
}
return file;
return payload.sources[0].startsWith('file://')
? fileURLToPath(payload.sources[0])
: payload.sources[0];
});
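
resolveSourceFile now leans on Node's built-in module.findSourceMap() rather than parsing source-map comments with convert-source-map. A small self-contained sketch of that lookup (the compiled path is hypothetical, and the map is only available after the file has been loaded):

import {findSourceMap} from 'node:module';
import {fileURLToPath} from 'node:url';

function originalSourceFor(compiledFile) {
	const sourceMap = findSourceMap(compiledFile);
	if (sourceMap === undefined || sourceMap.payload.sources.length === 0) {
		return compiledFile; // No map registered; treat the file as its own source.
	}
	const [firstSource] = sourceMap.payload.sources;
	return firstSource.startsWith('file://') ? fileURLToPath(firstSource) : firstSource;
}

// console.log(originalSourceFor('/project/build/example.test.js'));
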
const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
export const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
const testDir = path.dirname(resolveSourceFile(file));
if (fixedLocation) {
const relativeTestLocation = path.relative(projectDir, testDir);
@@ -447,8 +424,6 @@ const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
return testDir;
}, {cacheKey: ([{file}]) => file});
exports.determineSnapshotDir = determineSnapshotDir;
function determineSnapshotPaths({file, fixedLocation, projectDir}) {
const dir = determineSnapshotDir({file, fixedLocation, projectDir});
const relFile = path.relative(projectDir, resolveSourceFile(file));
@@ -460,7 +435,9 @@ function determineSnapshotPaths({file, fixedLocation, projectDir}) {
dir,
relFile,
snapFile,
reportFile
reportFile,
snapPath: path.join(dir, snapFile),
reportPath: path.join(dir, reportFile),
};
}
@@ -477,45 +454,52 @@ function cleanFile(file) {
}
}
// Remove snapshot and report if they exist. Returns an array containing the
// paths of the touched files.
function cleanSnapshots({file, fixedLocation, projectDir}) {
const {dir, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
export function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
// Keep runner unit tests that use `new Runner()` happy
if (file === undefined || projectDir === undefined) {
return new Manager({
recordNewSnapshots,
updating,
oldBlocksByTitle: new Map(),
newBlocksByTitle: new Map(),
});
}
return [
...cleanFile(path.join(dir, snapFile)),
...cleanFile(path.join(dir, reportFile))
];
}
const paths = determineSnapshotPaths({file, fixedLocation, projectDir});
const buffer = tryRead(paths.snapPath);
exports.cleanSnapshots = cleanSnapshots;
if (!buffer) {
return new Manager({
recordNewSnapshots,
updating,
...paths,
oldBlocksByTitle: new Map(),
newBlocksByTitle: new Map(),
});
}
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
const {dir, relFile, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
const snapPath = path.join(dir, snapFile);
let blocksByTitle;
let snapshotError;
let appendOnly = !updating;
let snapshotsByHash;
try {
const data = decodeSnapshots(buffer, paths.snapPath);
blocksByTitle = new Map(data.blocks.map(({title, ...block}) => [title, block]));
} catch (error) {
blocksByTitle = new Map();
if (!updating) {
const buffer = tryRead(snapPath);
if (buffer) {
snapshotsByHash = decodeSnapshots(buffer, snapPath);
} else {
appendOnly = false;
if (!updating) { // Discard all decoding errors when updating snapshots
snapshotError = error instanceof SnapshotError
? error
: new InvalidSnapshotError(paths.snapPath);
}
}
return new Manager({
appendOnly,
dir,
recordNewSnapshots,
relFile,
reportFile,
snapFile,
snapPath,
snapshotsByHash: snapshotsByHash || new Map()
updating,
...paths,
oldBlocksByTitle: blocksByTitle,
newBlocksByTitle: updating ? new Map() : blocksByTitle,
error: snapshotError,
});
}
exports.load = load;
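
Taken together, a caller drives the manager roughly as follows. This is a hedged sketch of internal AVA API, not something user code should rely on: the option and method names come from the code above, while the paths and values are invented.

import {load} from './snapshot-manager.js'; // Internal module path, as used within lib/.

const manager = load({
	file: '/project/test/example.test.js',
	projectDir: '/project',
	recordNewSnapshots: true,
	updating: false,
});

manager.touch('example test', 0); // Remember block ordering by task index.

const result = manager.compare({
	belongsTo: 'example test',
	index: 0,
	deferRecording: false,
	expected: {answer: 42},
	label: 'first snapshot',
});
console.log(result.pass); // true; the value is recorded as a new snapshot.

const saved = await manager.save();
if (saved) {
	console.log(saved.changedFiles); // The .snap file and the .md report.
}
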

422
node_modules/ava/lib/test.js generated vendored

@@ -1,11 +1,11 @@
'use strict';
const concordance = require('concordance');
const isPromise = require('is-promise');
const plur = require('plur');
const assert = require('./assert');
const nowAndTimers = require('./now-and-timers');
const parseTestArgs = require('./parse-test-args');
const concordanceOptions = require('./concordance-options').default;
import concordance from 'concordance';
import isPromise from 'is-promise';
import plur from 'plur';
import {AssertionError, Assertions, checkAssertionMessage} from './assert.js';
import concordanceOptions from './concordance-options.js';
import nowAndTimers from './now-and-timers.cjs';
import parseTestArgs from './parse-test-args.js';
function formatErrorValue(label, error) {
const formatted = concordance.format(error, concordanceOptions);
@@ -15,13 +15,13 @@ function formatErrorValue(label, error) {
const captureSavedError = () => {
const limitBefore = Error.stackTraceLimit;
Error.stackTraceLimit = 1;
const err = new Error();
const error = new Error(); // eslint-disable-line unicorn/error-message
Error.stackTraceLimit = limitBefore;
return err;
return error;
};
const testMap = new WeakMap();
class ExecutionContext extends assert.Assertions {
class ExecutionContext extends Assertions {
constructor(test) {
super({
pass: () => {
@@ -30,18 +30,15 @@ class ExecutionContext extends assert.Assertions {
pending: promise => {
test.addPendingAssertion(promise);
},
fail: err => {
test.addFailedAssertion(err);
fail: error => {
test.addFailedAssertion(error);
},
skip: () => {
test.countPassedAssertion();
},
compareWithSnapshot: options => {
return test.compareWithSnapshot(options);
},
powerAssert: test.powerAssert,
compareWithSnapshot: options => test.compareWithSnapshot(options),
experiments: test.experiments,
disableSnapshots: test.isHook === true
disableSnapshots: test.isHook === true,
});
testMap.set(this, test);
@@ -50,11 +47,9 @@ class ExecutionContext extends assert.Assertions {
};
this.log = (...inputArgs) => {
const args = inputArgs.map(value => {
return typeof value === 'string' ?
value :
concordance.format(value, concordanceOptions);
});
const args = inputArgs.map(value => typeof value === 'string'
? value
: concordance.format(value, concordanceOptions));
if (args.length > 0) {
test.addLog(args.join(' '));
}
@@ -81,99 +76,89 @@ class ExecutionContext extends assert.Assertions {
throw error;
}
const {args, buildTitle, implementations, receivedImplementationArray} = parseTestArgs(attemptArgs);
const {args, implementation, title} = parseTestArgs(attemptArgs);
if (implementations.length === 0) {
if (!implementation) {
throw new TypeError('Expected an implementation.');
}
const attemptPromises = implementations.map((implementation, index) => {
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
if (Array.isArray(implementation)) {
throw new TypeError('AVA 4 no longer supports t.try() with multiple implementations.');
}
if (!isSet || isEmpty) {
title = `${test.title} ─ attempt ${test.attemptCount + 1 + index}`;
} else if (isValid) {
title = `${test.title}${title}`;
} else {
throw new TypeError('`t.try()` titles must be strings'); // Throw synchronously!
}
let attemptTitle;
if (!title.isSet || title.isEmpty) {
attemptTitle = `${test.title} ─ attempt ${test.attemptCount + 1}`;
} else if (title.isValid) {
attemptTitle = `${test.title}${title.value}`;
} else {
throw new TypeError('`t.try()` titles must be strings');
}
if (!test.registerUniqueTitle(title)) {
throw new Error(`Duplicate test title: ${title}`);
}
if (!test.registerUniqueTitle(attemptTitle)) {
throw new Error(`Duplicate test title: ${attemptTitle}`);
}
return {implementation, title};
}).map(async ({implementation, title}) => {
let committed = false;
let discarded = false;
let committed = false;
let discarded = false;
const {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount} = await test.runAttempt(title, t => implementation(t, ...args));
const {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount} = await test.runAttempt(attemptTitle, t => implementation(t, ...args));
return {
errors,
logs: [...logs], // Don't allow modification of logs.
passed,
title,
commit: ({retainLogs = true} = {}) => {
if (committed) {
return;
}
if (discarded) {
test.saveFirstError(new Error('Can’t commit a result that was previously discarded'));
return;
}
committed = true;
test.finishAttempt({
assertCount,
commit: true,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount
});
},
discard: ({retainLogs = false} = {}) => {
if (committed) {
test.saveFirstError(new Error('Can’t discard a result that was previously committed'));
return;
}
if (discarded) {
return;
}
discarded = true;
test.finishAttempt({
assertCount: 0,
commit: false,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount
});
return {
errors,
logs: [...logs], // Don't allow modification of logs.
passed,
title: attemptTitle,
commit: ({retainLogs = true} = {}) => {
if (committed) {
return;
}
};
});
const results = await Promise.all(attemptPromises);
return receivedImplementationArray ? results : results[0];
if (discarded) {
test.saveFirstError(new Error('Can’t commit a result that was previously discarded'));
return;
}
committed = true;
test.finishAttempt({
assertCount,
commit: true,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount,
});
},
discard: ({retainLogs = false} = {}) => {
if (committed) {
test.saveFirstError(new Error('Can’t discard a result that was previously committed'));
return;
}
if (discarded) {
return;
}
discarded = true;
test.finishAttempt({
assertCount: 0,
commit: false,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount,
});
},
};
};
}
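
This is the machinery behind the public t.try() assertion, which in AVA 4 accepts exactly one implementation per call. A short example of the documented usage it supports:

import test from 'ava';

test('commit or discard an attempt', async t => {
	const attempt = await t.try('first attempt', async tt => {
		tt.is(1 + 1, 2);
	});

	if (attempt.passed) {
		attempt.commit(); // Keep the attempt's assertions and logs.
	} else {
		attempt.discard(); // Drop the attempt; its failures don't count.
		t.pass();
	}
});
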
get end() {
const end = testMap.get(this).bindEndCallback();
const endFn = error => end(error, captureSavedError());
return endFn;
}
get title() {
return testMap.get(this).title;
}
@@ -190,17 +175,9 @@ class ExecutionContext extends assert.Assertions {
const test = testMap.get(this);
return test.isHook ? test.testPassed : !test.assertError;
}
_throwsArgStart(assertion, file, line) {
testMap.get(this).trackThrows({assertion, file, line});
}
_throwsArgEnd() {
testMap.get(this).trackThrows(null);
}
}
class Test {
export default class Test {
constructor(options) {
this.contextRef = options.contextRef;
this.experiments = options.experiments || {};
@@ -208,12 +185,12 @@ class Test {
this.fn = options.fn;
this.isHook = options.isHook === true;
this.metadata = options.metadata;
this.powerAssert = options.powerAssert;
this.title = options.title;
this.testPassed = options.testPassed;
this.registerUniqueTitle = options.registerUniqueTitle;
this.logs = [];
this.teardowns = [];
this.notifyTimeoutUpdate = options.notifyTimeoutUpdate;
const {snapshotBelongsTo = this.title, nextSnapshotIndex = 0} = options;
this.snapshotBelongsTo = snapshotBelongsTo;
@@ -222,24 +199,20 @@ class Test {
const deferRecording = this.metadata.inline;
this.deferredSnapshotRecordings = [];
this.compareWithSnapshot = ({expected, id, message}) => {
this.compareWithSnapshot = ({expected, message}) => {
this.snapshotCount++;
// TODO: In a breaking change, reject non-undefined, falsy IDs and messages.
const belongsTo = id || snapshotBelongsTo;
const index = id ? 0 : this.nextSnapshotIndex++;
const label = id ? '' : message || `Snapshot ${index + 1}`; // Human-readable labels start counting at 1.
const belongsTo = snapshotBelongsTo;
const index = this.nextSnapshotIndex++;
const label = message;
const {taskIndex, associatedTaskIndex} = this.metadata;
const {record, ...result} = options.compareTestSnapshot({
belongsTo,
deferRecording,
expected,
index,
label,
taskIndex,
snapIndex: this.snapshotCount,
associatedTaskIndex
taskIndex: this.metadata.taskIndex,
});
if (record) {
this.deferredSnapshotRecordings.push(record);
@@ -250,16 +223,20 @@ class Test {
this.skipSnapshot = () => {
if (typeof options.skipSnapshot === 'function') {
options.skipSnapshot();
const record = options.skipSnapshot({
belongsTo: snapshotBelongsTo,
index: this.nextSnapshotIndex,
deferRecording,
taskIndex: this.metadata.taskIndex,
});
if (record) {
this.deferredSnapshotRecordings.push(record);
}
}
if (options.updateSnapshots) {
this.addFailedAssertion(new Error('Snapshot assertions cannot be skipped when updating snapshots'));
} else {
this.nextSnapshotIndex++;
this.snapshotCount++;
this.countPassedAssertion();
}
this.nextSnapshotIndex++;
this.snapshotCount++;
this.countPassedAssertion();
};
this.runAttempt = async (title, fn) => {
@@ -274,11 +251,11 @@ class Test {
const attempt = new Test({
...options,
fn,
metadata: {...options.metadata, callback: false, failing: false, inline: true},
metadata: {...options.metadata, failing: false, inline: true},
contextRef: contextRef.copy(),
snapshotBelongsTo,
nextSnapshotIndex,
title
title,
});
const {deferredSnapshotRecordings, error, logs, passed, assertCount, snapshotCount} = await attempt.run();
@@ -291,53 +268,17 @@ class Test {
this.attemptCount = 0;
this.calledEnd = false;
this.duration = null;
this.endCallbackFinisher = null;
this.finishDueToAttributedError = null;
this.finishDueToInactivity = null;
this.finishDueToTimeout = null;
this.finishing = false;
this.pendingAssertionCount = 0;
this.pendingAttemptCount = 0;
this.pendingThrowsAssertion = null;
this.planCount = null;
this.startedAt = 0;
this.timeoutMs = 0;
this.timeoutTimer = null;
}
bindEndCallback() {
if (this.metadata.callback) {
return (error, savedError) => {
this.endCallback(error, savedError);
};
}
const error_ = this.metadata.inline ? new Error('`t.end()` is not supported inside `t.try()`') : new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
throw error_;
}
endCallback(error, savedError) {
if (this.calledEnd) {
this.saveFirstError(new Error('`t.end()` called more than once'));
return;
}
this.calledEnd = true;
if (error) {
this.saveFirstError(new assert.AssertionError({
actual: error,
message: 'Callback called with an error',
savedError,
values: [formatErrorValue('Callback called with an error:', error)]
}));
}
if (this.endCallbackFinisher) {
this.endCallbackFinisher();
}
}
createExecutionContext() {
return new ExecutionContext(this);
}
@@ -374,7 +315,7 @@ class Test {
promise
.catch(error => this.saveFirstError(error))
.then(() => { // eslint-disable-line promise/prefer-await-to-then
.then(() => {
this.pendingAssertionCount--;
this.refreshTimeout();
});
@@ -451,7 +392,7 @@ class Test {
}
timeout(ms, message) {
const result = assert.checkAssertionMessage('timeout', message);
const result = checkAssertionMessage('timeout', message);
if (result !== true) {
this.saveFirstError(result);
// Allow the timeout to be set even when the message is invalid.
@@ -471,6 +412,8 @@ class Test {
this.finishDueToTimeout();
}
}, ms);
this.notifyTimeoutUpdate(this.timeoutMs);
}
refreshTimeout() {
@@ -509,11 +452,7 @@ class Test {
}
async runTeardowns() {
const teardowns = [...this.teardowns];
if (this.experiments.reverseTeardowns) {
teardowns.reverse();
}
const teardowns = [...this.teardowns].reverse();
for (const teardown of teardowns) {
try {
@@ -526,11 +465,11 @@ class Test {
verifyPlan() {
if (!this.assertError && this.planCount !== null && this.planCount !== this.assertCount) {
this.saveFirstError(new assert.AssertionError({
this.saveFirstError(new AssertionError({
assertion: 'plan',
message: `Planned for ${this.planCount} ${plur('assertion', this.planCount)}, but got ${this.assertCount}.`,
operator: '===',
savedError: this.planError
savedError: this.planError,
}));
}
}
@@ -561,70 +500,16 @@ class Test {
}
}
trackThrows(pending) {
this.pendingThrowsAssertion = pending;
}
detectImproperThrows(error) {
if (!this.pendingThrowsAssertion) {
return false;
}
const pending = this.pendingThrowsAssertion;
this.pendingThrowsAssertion = null;
const values = [];
if (error) {
values.push(formatErrorValue(`The following error was thrown, possibly before \`t.${pending.assertion}()\` could be called:`, error));
}
this.saveFirstError(new assert.AssertionError({
assertion: pending.assertion,
fixedSource: {file: pending.file, line: pending.line},
improperUsage: true,
message: `Improper usage of \`t.${pending.assertion}()\` detected`,
savedError: error instanceof Error && error,
values
}));
return true;
}
waitForPendingThrowsAssertion() {
return new Promise(resolve => {
this.finishDueToAttributedError = () => {
resolve(this.finish());
};
this.finishDueToInactivity = () => {
this.detectImproperThrows();
resolve(this.finish());
};
// Wait up to a second to see if an error can be attributed to the
// pending assertion.
nowAndTimers.setTimeout(() => this.finishDueToInactivity(), 1000).unref();
});
}
attributeLeakedError(error) {
if (!this.detectImproperThrows(error)) {
return false;
}
this.finishDueToAttributedError();
return true;
}
callFn() {
try {
return {
ok: true,
retval: this.fn.call(null, this.createExecutionContext())
retval: this.fn.call(null, this.createExecutionContext()),
};
} catch (error) {
return {
ok: false,
error
error,
};
}
}
@@ -634,13 +519,11 @@ class Test {
const result = this.callFn();
if (!result.ok) {
if (!this.detectImproperThrows(result.error)) {
this.saveFirstError(new assert.AssertionError({
message: 'Error thrown in test',
savedError: result.error instanceof Error && result.error,
values: [formatErrorValue('Error thrown in test:', result.error)]
}));
}
this.saveFirstError(new AssertionError({
message: 'Error thrown in test',
savedError: result.error instanceof Error && result.error,
values: [formatErrorValue('Error thrown in test:', result.error)],
}));
return this.finish();
}
@@ -653,7 +536,7 @@ class Test {
promise = new Promise((resolve, reject) => {
result.retval.subscribe({
error: reject,
complete: () => resolve()
complete: () => resolve(),
});
});
} else if (returnedPromise) {
@@ -661,37 +544,6 @@ class Test {
promise = Promise.resolve(result.retval);
}
if (this.metadata.callback) {
if (returnedObservable || returnedPromise) {
const asyncType = returnedObservable ? 'observables' : 'promises';
this.saveFirstError(new Error(`Do not return ${asyncType} from tests declared via \`test.cb(…)\`. Use \`test.cb(…)\` for legacy callback APIs. When using promises, observables or async functions, use \`test(…)\`.`));
return this.finish();
}
if (this.calledEnd) {
return this.finish();
}
return new Promise(resolve => {
this.endCallbackFinisher = () => {
resolve(this.finish());
};
this.finishDueToAttributedError = () => {
resolve(this.finish());
};
this.finishDueToTimeout = () => {
resolve(this.finish());
};
this.finishDueToInactivity = () => {
this.saveFirstError(new Error('`t.end()` was never called'));
resolve(this.finish());
};
});
}
if (promise) {
return new Promise(resolve => {
this.finishDueToAttributedError = () => {
@@ -703,24 +555,22 @@ class Test {
};
this.finishDueToInactivity = () => {
const error = returnedObservable ?
new Error('Observable returned by test never completed') :
new Error('Promise returned by test never resolved');
const error = returnedObservable
? new Error('Observable returned by test never completed')
: new Error('Promise returned by test never resolved');
this.saveFirstError(error);
resolve(this.finish());
};
promise
.catch(error => {
if (!this.detectImproperThrows(error)) {
this.saveFirstError(new assert.AssertionError({
message: 'Rejected promise returned by test',
savedError: error instanceof Error && error,
values: [formatErrorValue('Rejected promise returned by test. Reason:', error)]
}));
}
this.saveFirstError(new AssertionError({
message: 'Rejected promise returned by test',
savedError: error instanceof Error && error,
values: [formatErrorValue('Rejected promise returned by test. Reason:', error)],
}));
})
.then(() => resolve(this.finish())); // eslint-disable-line promise/prefer-await-to-then
.then(() => resolve(this.finish()));
});
}
@@ -730,10 +580,6 @@ class Test {
async finish() {
this.finishing = true;
if (!this.assertError && this.pendingThrowsAssertion) {
return this.waitForPendingThrowsAssertion();
}
this.clearTimeout();
this.verifyPlan();
this.verifyAssertions();
@ -759,9 +605,7 @@ class Test {
passed,
snapshotCount: this.snapshotCount,
assertCount: this.assertCount,
title: this.title
title: this.title,
};
}
}
module.exports = Test;
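
One behavioural change worth calling out from the runTeardowns() hunk above: teardowns now always run in reverse registration order, which was previously opt-in via the reverseTeardowns experiment. Illustrated with the documented t.teardown() API:

import test from 'ava';

test('teardowns run last-in, first-out', t => {
	const order = [];
	t.teardown(() => order.push('registered first'));
	t.teardown(() => order.push('registered second'));
	t.pass();
	// After the test body, AVA 4 runs 'registered second', then 'registered first'.
});
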

111
node_modules/ava/lib/watcher.js generated vendored

@@ -1,18 +1,26 @@
'use strict';
const nodePath = require('path');
const debug = require('debug')('ava:watcher');
const chokidar = require('chokidar');
const diff = require('lodash/difference');
const flatten = require('lodash/flatten');
const chalk = require('./chalk').get();
const {applyTestFileFilter, classify, getChokidarIgnorePatterns} = require('./globs');
const {levels: providerLevels} = require('./provider-manager');
import nodePath from 'node:path';
function rethrowAsync(err) {
import chokidar_ from 'chokidar';
import createDebug from 'debug';
import {chalk} from './chalk.js';
import {applyTestFileFilter, classify, getChokidarIgnorePatterns} from './globs.js';
let chokidar = chokidar_;
export function _testOnlyReplaceChokidar(replacement) {
chokidar = replacement;
}
let debug = createDebug('ava:watcher');
export function _testOnlyReplaceDebug(replacement) {
debug = replacement('ava:watcher');
}
function rethrowAsync(error) {
// Don't swallow exceptions. Note that any
// expected error should already have been logged
setImmediate(() => {
throw err;
throw error;
});
}
@@ -77,7 +85,7 @@ class TestDependency {
}
}
class Watcher {
export default class Watcher {
constructor({api, filter = [], globs, projectDir, providers, reporter}) {
this.debouncer = new Debouncer(this);
@@ -88,7 +96,7 @@ class Watcher {
const patternFilters = filter.map(({pattern}) => pattern);
this.providers = providers.filter(({level}) => level >= providerLevels.pathRewrites);
this.providers = providers;
this.run = (specificFiles = [], updateSnapshots = false) => {
const clearLogOnNextRun = this.clearLogOnNextRun && this.runVector > 0;
if (this.runVector > 0) {
@@ -104,12 +112,18 @@ class Watcher {
if (runOnlyExclusive) {
// The test files that previously contained exclusive tests are always
// run, together with the remaining specific files.
const remainingFiles = diff(specificFiles, exclusiveFiles);
specificFiles = this.filesWithExclusiveTests.concat(remainingFiles);
const remainingFiles = specificFiles.filter(file => !exclusiveFiles.includes(file));
specificFiles = [...this.filesWithExclusiveTests, ...remainingFiles];
}
if (filter.length > 0) {
specificFiles = applyTestFileFilter({cwd: projectDir, filter: patternFilters, testFiles: specificFiles});
specificFiles = applyTestFileFilter({
cwd: projectDir,
expandDirectories: false,
filter: patternFilters,
testFiles: specificFiles,
treatFilterPatternsAsFiles: false,
});
}
this.pruneFailures(specificFiles);
@@ -125,21 +139,21 @@ class Watcher {
previousFailures: this.sumPreviousFailures(this.runVector),
runOnlyExclusive,
runVector: this.runVector,
updateSnapshots: updateSnapshots === true
}
updateSnapshots: updateSnapshots === true,
},
})
.then(runStatus => { // eslint-disable-line promise/prefer-await-to-then
.then(runStatus => {
reporter.endRun();
reporter.lineWriter.writeLine(END_MESSAGE);
if (this.clearLogOnNextRun && (
runStatus.stats.failedHooks > 0 ||
runStatus.stats.failedTests > 0 ||
runStatus.stats.failedWorkers > 0 ||
runStatus.stats.internalErrors > 0 ||
runStatus.stats.timeouts > 0 ||
runStatus.stats.uncaughtExceptions > 0 ||
runStatus.stats.unhandledRejections > 0
runStatus.stats.failedHooks > 0
|| runStatus.stats.failedTests > 0
|| runStatus.stats.failedWorkers > 0
|| runStatus.stats.internalErrors > 0
|| runStatus.stats.timeouts > 0
|| runStatus.stats.uncaughtExceptions > 0
|| runStatus.stats.unhandledRejections > 0
)) {
this.clearLogOnNextRun = false;
}
@@ -150,6 +164,7 @@ class Watcher {
this.testDependencies = [];
this.trackTestDependencies(api);
this.temporaryFiles = new Set();
this.touchedFiles = new Set();
this.trackTouchedFiles(api);
@@ -168,7 +183,7 @@ class Watcher {
chokidar.watch(['**/*'], {
cwd: this.globs.cwd,
ignored: getChokidarIgnorePatterns(this.globs),
ignoreInitial: true
ignoreInitial: true,
}).on('all', (event, path) => {
if (event === 'add' || event === 'change' || event === 'unlink') {
debug('Detected %s of %s', event, path);
@@ -231,9 +246,13 @@ class Watcher {
return;
}
for (const file of evt.files) {
for (const file of evt.files.changedFiles) {
this.touchedFiles.add(file);
}
for (const file of evt.files.temporaryFiles) {
this.temporaryFiles.add(file);
}
});
});
}
@@ -307,7 +326,7 @@ class Watcher {
this.filesWithFailures.push({
file,
vector,
count: 1
count: 1,
});
}
}
@@ -379,6 +398,14 @@ class Watcher {
return false;
}
// Unlike touched files, temporary files are never cleared. We may see
// adds and unlinks detected separately, so we track the temporary files
// as long as AVA is running.
if (this.temporaryFiles.has(path)) {
debug('Ignoring known temporary file %s', path);
return false;
}
return true;
});
@@ -394,21 +421,23 @@ class Watcher {
}
const dirtyHelpersAndSources = [];
const dirtyTests = [];
const addedOrChangedTests = [];
const unlinkedTests = [];
for (const filePath of dirtyPaths) {
const {isIgnoredByWatcher, isTest} = classify(filePath, this.globs);
if (!isIgnoredByWatcher) {
if (isTest) {
dirtyTests.push(filePath);
if (dirtyStates[filePath] === 'unlink') {
unlinkedTests.push(filePath);
} else {
addedOrChangedTests.push(filePath);
}
} else {
dirtyHelpersAndSources.push(filePath);
}
}
}
const addedOrChangedTests = dirtyTests.filter(path => dirtyStates[path] !== 'unlink');
const unlinkedTests = diff(dirtyTests, addedOrChangedTests);
this.cleanUnlinkedTests(unlinkedTests);
// No need to rerun tests if the only change is that tests were deleted
@@ -423,12 +452,10 @@ class Watcher {
}
// Try to find tests that depend on the changed source files
const testsByHelpersOrSource = dirtyHelpersAndSources.map(path => {
return this.testDependencies.filter(dep => dep.contains(path)).map(dep => {
debug('%s is a dependency of %s', path, dep.file);
return dep.file;
});
}, this).filter(tests => tests.length > 0);
const testsByHelpersOrSource = dirtyHelpersAndSources.map(path => this.testDependencies.filter(dep => dep.contains(path)).map(dep => {
debug('%s is a dependency of %s', path, dep.file);
return dep.file;
})).filter(tests => tests.length > 0);
// Rerun all tests if source files were changed that could not be traced to
// specific tests
@@ -440,8 +467,6 @@ class Watcher {
}
// Run all affected tests
this.run([...new Set(addedOrChangedTests.concat(flatten(testsByHelpersOrSource)))]);
this.run([...new Set([addedOrChangedTests, testsByHelpersOrSource].flat(2))]);
}
}
module.exports = Watcher;
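
The watcher also drops lodash: difference() becomes a filter over includes(), and flatten() becomes Array.prototype.flat(). The equivalences used above, in isolation:

const remaining = (specificFiles, exclusiveFiles) =>
	specificFiles.filter(file => !exclusiveFiles.includes(file)); // was difference(specificFiles, exclusiveFiles)

const affected = (addedOrChangedTests, testsByHelpersOrSource) =>
	[...new Set([addedOrChangedTests, testsByHelpersOrSource].flat(2))]; // was concat + flatten

console.log(remaining(['a.test.js', 'b.test.js'], ['b.test.js'])); // ['a.test.js']
console.log(affected(['a.test.js'], [['b.test.js', 'a.test.js']])); // ['a.test.js', 'b.test.js']
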

208
node_modules/ava/lib/worker/base.js generated vendored Normal file

@@ -0,0 +1,208 @@
import {createRequire} from 'node:module';
import process from 'node:process';
import {pathToFileURL} from 'node:url';
import {workerData} from 'node:worker_threads';
import setUpCurrentlyUnhandled from 'currently-unhandled';
import {set as setChalk} from '../chalk.js';
import nowAndTimers from '../now-and-timers.cjs';
import providerManager from '../provider-manager.js';
import Runner from '../runner.js';
import serializeError from '../serialize-error.js';
import channel from './channel.cjs';
import dependencyTracking from './dependency-tracker.js';
import lineNumberSelection from './line-numbers.js';
import {set as setOptions} from './options.cjs';
import {flags, refs, sharedWorkerTeardowns} from './state.cjs';
import {isRunningInThread, isRunningInChildProcess} from './utils.cjs';
const currentlyUnhandled = setUpCurrentlyUnhandled();
const run = async options => {
setOptions(options);
setChalk(options.chalkOptions);
if (options.chalkOptions.level > 0) {
const {stdout, stderr} = process;
global.console = Object.assign(global.console, new console.Console({stdout, stderr, colorMode: true}));
}
async function exit(code) {
if (!process.exitCode) {
process.exitCode = code;
}
dependencyTracking.flush();
await channel.flush();
process.exit(); // eslint-disable-line unicorn/no-process-exit
}
let checkSelectedByLineNumbers;
try {
checkSelectedByLineNumbers = lineNumberSelection({
file: options.file,
lineNumbers: options.lineNumbers,
});
} catch (error) {
channel.send({type: 'line-number-selection-error', err: serializeError('Line number selection error', false, error, options.file)});
checkSelectedByLineNumbers = () => false;
}
const runner = new Runner({
checkSelectedByLineNumbers,
experiments: options.experiments,
failFast: options.failFast,
failWithoutAssertions: options.failWithoutAssertions,
file: options.file,
match: options.match,
projectDir: options.projectDir,
recordNewSnapshots: options.recordNewSnapshots,
runOnlyExclusive: options.runOnlyExclusive,
serial: options.serial,
snapshotDir: options.snapshotDir,
updateSnapshots: options.updateSnapshots,
});
refs.runnerChain = runner.chain;
channel.peerFailed.then(() => {
runner.interrupt();
});
runner.on('dependency', dependencyTracking.track);
runner.on('stateChange', state => channel.send(state));
runner.on('error', error => {
channel.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
});
runner.on('finish', async () => {
try {
const {touchedFiles} = await runner.saveSnapshotState();
if (touchedFiles) {
channel.send({type: 'touched-files', files: touchedFiles});
}
} catch (error) {
channel.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
return;
}
try {
await Promise.all(sharedWorkerTeardowns.map(fn => fn()));
} catch (error) {
channel.send({type: 'uncaught-exception', err: serializeError('Shared worker teardown error', false, error, runner.file)});
exit(1);
return;
}
nowAndTimers.setImmediate(() => {
for (const rejection of currentlyUnhandled()) {
channel.send({type: 'unhandled-rejection', err: serializeError('Unhandled rejection', true, rejection.reason, runner.file)});
}
exit(0);
});
});
process.on('uncaughtException', error => {
channel.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
});
// Store value to prevent required modules from modifying it.
const testPath = options.file;
const extensionsToLoadAsModules = Object.entries(options.moduleTypes)
.filter(([, type]) => type === 'module')
.map(([extension]) => extension);
// Install before processing options.require, so if helpers are added to the
// require configuration the *compiled* helper will be loaded.
const {projectDir, providerStates = []} = options;
const providers = [];
await Promise.all(providerStates.map(async ({type, state}) => {
if (type === 'typescript') {
const provider = await providerManager.typescript(projectDir);
providers.push(provider.worker({extensionsToLoadAsModules, state}));
}
}));
const require = createRequire(import.meta.url);
const load = async ref => {
for (const provider of providers) {
if (provider.canLoad(ref)) {
return provider.load(ref, {requireFn: require});
}
}
for (const extension of extensionsToLoadAsModules) {
if (ref.endsWith(`.${extension}`)) {
return import(pathToFileURL(ref)); // eslint-disable-line node/no-unsupported-features/es-syntax
}
}
// We still support require() since it's more easily monkey-patched.
return require(ref);
};
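
The load() helper above decides per reference whether a provider, native ESM or CommonJS should handle it. A reduced sketch of the extension-based part of that decision, without the provider layer (the extension list is an assumed configuration):

import {createRequire} from 'node:module';
import {pathToFileURL} from 'node:url';

const require = createRequire(import.meta.url);
const extensionsToLoadAsModules = ['mjs']; // Assumed; AVA derives this from options.moduleTypes.

async function loadRef(ref) {
	if (extensionsToLoadAsModules.some(extension => ref.endsWith(`.${extension}`))) {
		return import(pathToFileURL(ref).href); // Native ESM.
	}
	return require(ref); // CommonJS, which remains easier to monkey-patch.
}

// await loadRef('/project/test/example.test.mjs'); // hypothetical path
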
try {
for await (const ref of (options.require || [])) {
await load(ref);
}
// Install dependency tracker after the require configuration has been evaluated
// to make sure we also track dependencies with custom require hooks
dependencyTracking.install(require.extensions, testPath);
if (options.debug && options.debug.port !== undefined && options.debug.host !== undefined) {
// If an inspector was active when the main process started, and is
// already active for the worker process, do not open a new one.
const {default: inspector} = await import('node:inspector'); // eslint-disable-line node/no-unsupported-features/es-syntax
if (!options.debug.active || inspector.url() === undefined) {
inspector.open(options.debug.port, options.debug.host, true);
}
if (options.debug.break) {
debugger; // eslint-disable-line no-debugger
}
}
await load(testPath);
if (flags.loadedMain) {
// Unreference the channel if the test file required AVA. This stops it
// from keeping the event loop busy, which means the `beforeExit` event can be
// used to detect when tests stall.
channel.unref();
} else {
channel.send({type: 'missing-ava-import'});
exit(1);
}
} catch (error) {
channel.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
}
};
const onError = error => {
// There shouldn't be any errors, but if there are we may not have managed
// to bootstrap enough code to serialize them. Re-throw and let the process
// crash.
setImmediate(() => {
throw error;
});
};
if (isRunningInThread) {
channel.send({type: 'starting'}); // AVA won't terminate the worker thread until it's seen this message.
const {options} = workerData;
delete workerData.options; // Don't allow user code access.
run(options).catch(onError);
} else if (isRunningInChildProcess) {
channel.send({type: 'ready-for-options'});
channel.options.then(run).catch(onError);
}

290
node_modules/ava/lib/worker/channel.cjs generated vendored Normal file

@@ -0,0 +1,290 @@
'use strict';
const events = require('events');
const process = require('process');
const {MessageChannel, threadId} = require('worker_threads');
const timers = require('../now-and-timers.cjs');
const {isRunningInChildProcess, isRunningInThread} = require('./utils.cjs');
let pEvent = async (emitter, event, options) => {
// We need to import p-event, but import() is asynchronous. Buffer any events
// emitted in the meantime. Don't handle errors.
const buffer = [];
const addToBuffer = (...args) => buffer.push(args);
emitter.on(event, addToBuffer);
try {
({pEvent} = await import('p-event')); // eslint-disable-line node/no-unsupported-features/es-syntax
} finally {
emitter.off(event, addToBuffer);
}
if (buffer.length === 0) {
return pEvent(emitter, event, options);
}
// Now replay buffered events.
const replayEmitter = new events.EventEmitter();
const promise = pEvent(replayEmitter, event, options);
for (const args of buffer) {
replayEmitter.emit(event, ...args);
}
const replay = (...args) => replayEmitter.emit(event, ...args);
emitter.on(event, replay);
try {
return await promise;
} finally {
emitter.off(event, replay);
}
};
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
class RefCounter {
constructor() {
this.count = 0;
}
refAndTest() {
return ++this.count === 1;
}
testAndUnref() {
return this.count > 0 && --this.count === 0;
}
}
class MessagePortHandle {
constructor(port) {
this.counter = new RefCounter();
this.unreferenceable = false;
this.channel = port;
// Referencing the port does not immediately prevent the thread from
// exiting. Use a timer to keep a reference for at least a second.
this.workaroundTimer = timers.setTimeout(() => {}, 1000).unref();
}
forceUnref() {
if (this.unreferenceable) {
return;
}
this.unreferenceable = true;
this.workaroundTimer.unref();
this.channel.unref();
}
ref() {
if (!this.unreferenceable && this.counter.refAndTest()) {
this.workaroundTimer.refresh().ref();
this.channel.ref();
}
}
unref() {
if (!this.unreferenceable && this.counter.testAndUnref()) {
this.workaroundTimer.unref();
this.channel.unref();
}
}
send(evt, transferList) {
this.channel.postMessage({ava: evt}, transferList);
}
}
class IpcHandle {
constructor(bufferedSend) {
this.counter = new RefCounter();
this.channel = process;
this.sendRaw = bufferedSend;
}
ref() {
if (this.counter.refAndTest()) {
process.channel.ref();
}
}
unref() {
if (this.counter.testAndUnref()) {
process.channel.unref();
}
}
send(evt) {
this.sendRaw({ava: evt});
}
}
let handle;
if (isRunningInChildProcess) {
const {controlFlow} = require('../ipc-flow-control.cjs');
handle = new IpcHandle(controlFlow(process));
} else if (isRunningInThread) {
const {parentPort} = require('worker_threads');
handle = new MessagePortHandle(parentPort);
}
// The attaching of message listeners will cause the port to be referenced by
// Node.js. In order to keep track, explicitly reference before attaching.
handle.ref();
exports.options = pEvent(handle.channel, 'message', selectAvaMessage('options')).then(message => message.ava.options);
exports.peerFailed = pEvent(handle.channel, 'message', selectAvaMessage('peer-failed'));
exports.send = handle.send.bind(handle);
exports.unref = handle.unref.bind(handle);
let pendingPings = Promise.resolve();
async function flush() {
handle.ref();
const promise = pendingPings.then(async () => {
handle.send({type: 'ping'});
await pEvent(handle.channel, 'message', selectAvaMessage('pong'));
if (promise === pendingPings) {
handle.unref();
}
});
pendingPings = promise;
await promise;
}
exports.flush = flush;
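
flush() exists so a worker can be sure every message it sent has been seen by the main process before it exits: ref the channel, send a ping, wait for the matching pong, and only unref if no newer flush started in the meantime. The same handshake against a fake handle, runnable on its own:

import {EventEmitter, once} from 'node:events';

const fakeChannel = new EventEmitter();
const handle = {
	ref() {},
	unref() {},
	send(message) {
		// Pretend the main process acknowledges every ping.
		if (message.type === 'ping') {
			setImmediate(() => fakeChannel.emit('message', {type: 'pong'}));
		}
	},
};

let pendingPings = Promise.resolve();
async function flush() {
	handle.ref();
	const promise = pendingPings.then(async () => {
		handle.send({type: 'ping'});
		await once(fakeChannel, 'message'); // The pong.
		if (promise === pendingPings) {
			handle.unref(); // Only the most recent flush releases the reference.
		}
	});
	pendingPings = promise;
	await promise;
}

await flush();
console.log('all earlier messages have been acknowledged');
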
let channelCounter = 0;
let messageCounter = 0;
const channelEmitters = new Map();
function createChannelEmitter(channelId) {
if (channelEmitters.size === 0) {
handle.channel.on('message', message => {
if (!message.ava) {
return;
}
const {channelId, type, ...payload} = message.ava;
if (type === 'shared-worker-error') {
const emitter = channelEmitters.get(channelId);
if (emitter !== undefined) {
emitter.emit(type, payload);
}
}
});
}
const emitter = new events.EventEmitter();
channelEmitters.set(channelId, emitter);
return [emitter, () => channelEmitters.delete(channelId)];
}
function registerSharedWorker(filename, initialData) {
const channelId = `${threadId}/channel/${++channelCounter}`;
const {port1: ourPort, port2: theirPort} = new MessageChannel();
const sharedWorkerHandle = new MessagePortHandle(ourPort);
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
handle.send({
type: 'shared-worker-connect',
channelId,
filename,
initialData,
port: theirPort,
}, [theirPort]);
let currentlyAvailable = false;
let error = null;
// The attaching of message listeners will cause the port to be referenced by
// Node.js. In order to keep track, explicitly reference before attaching.
sharedWorkerHandle.ref();
const ready = pEvent(ourPort, 'message', ({type}) => type === 'ready').then(() => {
currentlyAvailable = error === null;
}).finally(() => {
// Once ready, it's up to user code to subscribe to messages, which (see
// below) causes us to reference the port.
sharedWorkerHandle.unref();
});
const messageEmitters = new Set();
// Errors are received over the test worker channel, not the message port
// dedicated to the shared worker.
pEvent(channelEmitter, 'shared-worker-error').then(() => {
unsubscribe();
sharedWorkerHandle.forceUnref();
error = new Error('The shared worker is no longer available');
currentlyAvailable = false;
for (const emitter of messageEmitters) {
emitter.emit('error', error);
}
});
ourPort.on('message', message => {
if (message.type === 'message') {
// Wait for a turn of the event loop, to allow new subscriptions to be set
// up in response to the previous message.
setImmediate(() => {
for (const emitter of messageEmitters) {
emitter.emit('message', message);
}
});
}
});
return {
forceUnref: () => sharedWorkerHandle.forceUnref(),
ready,
channel: {
available: ready,
get currentlyAvailable() {
return currentlyAvailable;
},
async * receive() {
if (error !== null) {
throw error;
}
const emitter = new events.EventEmitter();
messageEmitters.add(emitter);
try {
sharedWorkerHandle.ref();
for await (const [message] of events.on(emitter, 'message')) {
yield message;
}
} finally {
sharedWorkerHandle.unref();
messageEmitters.delete(emitter);
}
},
post(data, replyTo) {
if (error !== null) {
throw error;
}
if (!currentlyAvailable) {
throw new Error('Shared worker is not yet available');
}
const messageId = `${channelId}/message/${++messageCounter}`;
ourPort.postMessage({
type: 'message',
messageId,
replyTo,
data,
});
return messageId;
},
},
};
}
exports.registerSharedWorker = registerSharedWorker;
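
The connect handshake above transfers one half of a MessageChannel to the main process and keeps the other half referenced only while something is listening. A self-contained sketch of that underlying pattern, with no AVA involved, where the second port stands in for the main-process side:

import {MessageChannel} from 'node:worker_threads';

const {port1: ourPort, port2: theirPort} = new MessageChannel();

// Stand-in for the main process, which would normally receive theirPort via postMessage(..., [theirPort]).
theirPort.on('message', message => {
	if (message.type === 'message') {
		theirPort.postMessage({type: 'message', data: {echo: message.data}});
	}
});

ourPort.on('message', message => {
	console.log('reply:', message.data); // {echo: {hello: 'world'}}
	ourPort.unref(); // Nothing left to wait for; let the process exit.
	theirPort.unref();
});

ourPort.postMessage({type: 'message', data: {hello: 'world'}});
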

node_modules/ava/lib/worker/dependency-tracker.js

@@ -1,20 +1,19 @@
/* eslint-disable node/no-deprecated-api */
'use strict';
const ipc = require('./ipc');
import process from 'node:process';
import channel from './channel.cjs';
const seenDependencies = new Set();
let newDependencies = [];
function flush() {
if (newDependencies.length === 0) {
return;
}
ipc.send({type: 'dependencies', dependencies: newDependencies});
channel.send({type: 'dependencies', dependencies: newDependencies});
newDependencies = [];
}
exports.flush = flush;
function track(filename) {
if (seenDependencies.has(filename)) {
return;
@@ -28,20 +27,22 @@ function track(filename) {
newDependencies.push(filename);
}
exports.track = track;
const tracker = {
flush,
track,
install(extensions, testPath) {
for (const ext of Object.keys(extensions)) {
const wrappedHandler = extensions[ext];
function install(testPath) {
for (const ext of Object.keys(require.extensions)) {
const wrappedHandler = require.extensions[ext];
extensions[ext] = (module, filename) => {
if (filename !== testPath) {
track(filename);
}
require.extensions[ext] = (module, filename) => {
if (filename !== testPath) {
track(filename);
}
wrappedHandler(module, filename);
};
}
},
};
wrappedHandler(module, filename);
};
}
}
exports.install = install;
export default tracker;


@@ -1,15 +1,16 @@
'use strict';
const path = require('path');
const chalk = require('chalk'); // Use default Chalk instance.
const process = require('process');
const {isRunningInThread, isRunningInChildProcess} = require('./utils.cjs');
// Check if the test is being run without AVA cli
const isForked = typeof process.send === 'function';
if (!isForked) {
if (!isRunningInChildProcess && !isRunningInThread) {
if (process.argv[1]) {
const fp = path.relative('.', process.argv[1]);
console.log();
console.error(`Test files must be run with the AVA CLI:\n\n ${chalk.grey.dim('$')} ${chalk.cyan('ava ' + fp)}\n`);
console.error(`Test files must be run with the AVA CLI:\n\n $ ava ${fp}\n`);
process.exit(1); // eslint-disable-line unicorn/no-process-exit
} else {

201
node_modules/ava/lib/worker/ipc.js generated vendored

@@ -1,201 +0,0 @@
'use strict';
const events = require('events');
const pEvent = require('p-event');
const {controlFlow} = require('../ipc-flow-control');
const {get: getOptions} = require('./options');
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
exports.options = pEvent(process, 'message', selectAvaMessage('options')).then(message => message.ava.options);
exports.peerFailed = pEvent(process, 'message', selectAvaMessage('peer-failed'));
const bufferedSend = controlFlow(process);
function send(evt) {
bufferedSend({ava: evt});
}
exports.send = send;
let refs = 1;
function ref() {
if (++refs === 1) {
process.channel.ref();
}
}
function unref() {
if (refs > 0 && --refs === 0) {
process.channel.unref();
}
}
exports.unref = unref;
let pendingPings = Promise.resolve();
async function flush() {
ref();
const promise = pendingPings.then(async () => { // eslint-disable-line promise/prefer-await-to-then
send({type: 'ping'});
await pEvent(process, 'message', selectAvaMessage('pong'));
if (promise === pendingPings) {
unref();
}
});
pendingPings = promise;
await promise;
}
exports.flush = flush;
let channelCounter = 0;
let messageCounter = 0;
const channelEmitters = new Map();
function createChannelEmitter(channelId) {
if (channelEmitters.size === 0) {
process.on('message', message => {
if (!message.ava) {
return;
}
const {channelId, type, ...payload} = message.ava;
if (
type === 'shared-worker-error' ||
type === 'shared-worker-message' ||
type === 'shared-worker-ready'
) {
const emitter = channelEmitters.get(channelId);
if (emitter !== undefined) {
emitter.emit(type, payload);
}
}
});
}
const emitter = new events.EventEmitter();
channelEmitters.set(channelId, emitter);
return [emitter, () => channelEmitters.delete(channelId)];
}
function registerSharedWorker(filename, initialData) {
const channelId = `${getOptions().forkId}/channel/${++channelCounter}`;
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
let forcedUnref = false;
let refs = 0;
const forceUnref = () => {
if (forcedUnref) {
return;
}
forcedUnref = true;
if (refs > 0) {
unref();
}
};
const refChannel = () => {
if (!forcedUnref && ++refs === 1) {
ref();
}
};
const unrefChannel = () => {
if (!forcedUnref && refs > 0 && --refs === 0) {
unref();
}
};
send({
type: 'shared-worker-connect',
channelId,
filename,
initialData
});
let currentlyAvailable = false;
let error = null;
refChannel();
const ready = pEvent(channelEmitter, 'shared-worker-ready').then(() => { // eslint-disable-line promise/prefer-await-to-then
currentlyAvailable = error === null;
}).finally(unrefChannel);
const messageEmitters = new Set();
const handleMessage = message => {
// Wait for a turn of the event loop, to allow new subscriptions to be set
// up in response to the previous message.
setImmediate(() => {
for (const emitter of messageEmitters) {
emitter.emit('message', message);
}
});
};
channelEmitter.on('shared-worker-message', handleMessage);
pEvent(channelEmitter, 'shared-worker-error').then(() => { // eslint-disable-line promise/prefer-await-to-then
unsubscribe();
forceUnref();
error = new Error('The shared worker is no longer available');
currentlyAvailable = false;
for (const emitter of messageEmitters) {
emitter.emit('error', error);
}
});
return {
forceUnref,
ready,
channel: {
available: ready,
get currentlyAvailable() {
return currentlyAvailable;
},
async * receive() {
if (error !== null) {
throw error;
}
const emitter = new events.EventEmitter();
messageEmitters.add(emitter);
try {
refChannel();
for await (const [message] of events.on(emitter, 'message')) {
yield message;
}
} finally {
unrefChannel();
messageEmitters.delete(emitter);
}
},
post(serializedData, replyTo) {
if (error !== null) {
throw error;
}
if (!currentlyAvailable) {
throw new Error('Shared worker is not yet available');
}
const messageId = `${channelId}/message/${++messageCounter}`;
send({
type: 'shared-worker-message',
channelId,
messageId,
replyTo,
serializedData
});
return messageId;
}
}
};
}
exports.registerSharedWorker = registerSharedWorker;
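
The file above is AVA v3's CommonJS worker IPC layer, removed in this diff. For orientation, a hedged usage sketch of the channel returned by registerSharedWorker; the worker filename and the message payload fields are invented for illustration:

// Hedged usage sketch for the API defined above; the filename and the
// payload shapes are illustrative only.
const {registerSharedWorker} = require('./ipc');

async function talkToSharedWorker() {
  const {ready, channel} = registerSharedWorker('/path/to/shared-worker.js', {initial: true});
  await ready; // Resolves once the 'shared-worker-ready' message arrives.

  // post() throws until the worker is currently available.
  const messageId = channel.post({question: 'ping?'});

  // receive() is an async generator fed by 'shared-worker-message' events.
  for await (const message of channel.receive()) {
    if (message.replyTo === messageId) {
      console.log('reply:', message.serializedData);
      break;
    }
  }
}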

@ -1,18 +1,27 @@
import * as fs from 'node:fs';
import {createRequire, findSourceMap} from 'node:module';
import {pathToFileURL} from 'node:url';
import callsites from 'callsites';
const require = createRequire(import.meta.url);
function parse(file) {
const fs = require('fs');
// Avoid loading these until we actually need to select tests by line number.
const acorn = require('acorn');
const walk = require('acorn-walk');
const ast = acorn.parse(fs.readFileSync(file, 'utf8'), {
ecmaVersion: 11,
locations: true
locations: true,
sourceType: 'module',
});
const locations = [];
walk.simple(ast, {
CallExpression(node) {
locations.push(node.loc);
}
},
});
// Walking is depth-first, but we want to sort these breadth-first.
@ -49,36 +58,65 @@ function findTest(locations, declaration) {
return spans.pop();
}
const range = (start, end) => new Array(end - start + 1).fill(start).map((element, index) => element + index);
const range = (start, end) => Array.from({length: end - start + 1}).fill(start).map((element, index) => element + index);
module.exports = ({file, lineNumbers = []}) => {
const translate = (sourceMap, pos) => {
if (sourceMap === undefined) {
return pos;
}
const entry = sourceMap.findEntry(pos.line - 1, pos.column); // Source maps are 0-based
return {
line: entry.originalLine + 1, // Readjust for Acorn.
column: entry.originalColumn,
};
};
export default function lineNumberSelection({file, lineNumbers = []}) {
if (lineNumbers.length === 0) {
return undefined;
}
// Avoid loading these until we actually need to select tests by line number.
const callsites = require('callsites');
const sourceMapSupport = require('source-map-support');
const locations = parse(file);
const selected = new Set(lineNumbers);
let locations = parse(file);
let lookedForSourceMap = false;
let sourceMap;
return () => {
if (!lookedForSourceMap) {
lookedForSourceMap = true;
// The returned function is called *after* the file has been loaded.
// Source maps are not available before then.
sourceMap = findSourceMap(file);
if (sourceMap !== undefined) {
locations = locations.map(({start, end}) => ({
start: translate(sourceMap, start),
end: translate(sourceMap, end),
}));
}
}
// Assume this is called from a test declaration, which is located in the file.
// If not… don't select the test!
const callSite = callsites().find(callSite => callSite.getFileName() === file);
const callSite = callsites().find(callSite => {
const current = callSite.getFileName();
if (file.startsWith('file://')) {
return current.startsWith('file://') ? file === current : file === pathToFileURL(current).toString();
}
return current.startsWith('file://') ? pathToFileURL(file).toString() === current : file === current;
});
if (!callSite) {
return false;
}
// FIXME: This assumes the callSite hasn't already been adjusted. It's likely
// that if `source-map-support/register` has been loaded, this would result
// in the wrong location.
const sourceCallSite = sourceMapSupport.wrapCallSite(callSite);
const start = {
line: sourceCallSite.getLineNumber(),
column: sourceCallSite.getColumnNumber() - 1 // Use 0-indexed columns.
};
const start = translate(sourceMap, {
line: callSite.getLineNumber(), // 1-based
column: callSite.getColumnNumber() - 1, // Comes out as 1-based, Acorn wants 0-based
});
const test = findTest(locations, start);
if (!test) {
@ -87,4 +125,4 @@ module.exports = ({file, lineNumbers = []}) => {
return range(test.start.line, test.end.line).some(line => selected.has(line));
};
};
}
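
The rewritten selector above parses the test file with acorn, collects the location of every CallExpression, and, when the returned function is later invoked from a test declaration, maps that call site through any source map discovered via node:module's findSourceMap and checks whether the enclosing call spans one of the requested lines. A hedged usage sketch; the file path and line number are invented:

// Hedged usage sketch; the path and line number are invented.
import lineNumberSelection from './line-numbers.js';

const selectsLine = lineNumberSelection({
  file: '/project/test/math.test.js',
  lineNumbers: [12], // e.g. from running: ava test/math.test.js:12
});

// AVA calls the returned function from inside each test declaration in that
// file; it returns true when the declaration's span covers a selected line.
// With no line numbers requested, lineNumberSelection() returns undefined.
if (selectsLine === undefined || selectsLine()) {
  // ...register the test...
}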

Some files were not shown because too many files have changed in this diff.