Fix dependabot issues
This commit is contained in:
parent
c89d9bd8b0
commit
531c6ba7c8
705 changed files with 53406 additions and 20466 deletions
165
node_modules/ava/eslint-plugin-helper.js
generated
vendored
165
node_modules/ava/eslint-plugin-helper.js
generated
vendored
|
|
@ -1,26 +1,25 @@
|
|||
'use strict';
|
||||
const normalizeExtensions = require('./lib/extensions');
|
||||
let isMainThread = true;
|
||||
let supportsWorkers = false;
|
||||
try {
|
||||
({isMainThread} = require('worker_threads'));
|
||||
supportsWorkers = true;
|
||||
} catch {}
|
||||
|
||||
const {classify, hasExtension, isHelperish, matches, normalizeFileForMatching, normalizeGlobs, normalizePatterns} = require('./lib/globs');
|
||||
const loadConfig = require('./lib/load-config');
|
||||
const providerManager = require('./lib/provider-manager');
|
||||
|
||||
const configCache = new Map();
|
||||
const helperCache = new Map();
|
||||
let resolveGlobs;
|
||||
let resolveGlobsSync;
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
if (!supportsWorkers || !isMainThread) {
|
||||
const normalizeExtensions = require('./lib/extensions');
|
||||
const {loadConfig, loadConfigSync} = require('./lib/load-config');
|
||||
const providerManager = require('./lib/provider-manager');
|
||||
|
||||
let conf;
|
||||
let providers;
|
||||
if (configCache.has(projectDir)) {
|
||||
({conf, providers} = configCache.get(projectDir));
|
||||
} else {
|
||||
conf = loadConfig({resolveFrom: projectDir});
|
||||
const configCache = new Map();
|
||||
|
||||
providers = [];
|
||||
const collectProviders = ({conf, projectDir}) => {
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
const {level, main} = providerManager.babel(projectDir);
|
||||
providers.push({
|
||||
|
|
@ -39,12 +38,125 @@ function load(projectDir, overrides) {
|
|||
});
|
||||
}
|
||||
|
||||
configCache.set(projectDir, {conf, providers});
|
||||
}
|
||||
return providers;
|
||||
};
|
||||
|
||||
const extensions = overrides && overrides.extensions ?
|
||||
normalizeExtensions(overrides.extensions) :
|
||||
normalizeExtensions(conf.extensions, providers);
|
||||
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
|
||||
const extensions = overrideExtensions ?
|
||||
normalizeExtensions(overrideExtensions) :
|
||||
normalizeExtensions(conf.extensions, providers);
|
||||
|
||||
return {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrideFiles ? overrideFiles : conf.files,
|
||||
providers
|
||||
})
|
||||
};
|
||||
};
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
const conf = loadConfigSync({resolveFrom: projectDir});
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
configCache.set(projectDir, {conf, providers});
|
||||
}
|
||||
|
||||
const {conf, providers} = configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
|
||||
resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(conf => { // eslint-disable-line promise/prefer-await-to-then
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
return {conf, providers};
|
||||
}));
|
||||
}
|
||||
|
||||
const {conf, providers} = await configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
}
|
||||
|
||||
if (supportsWorkers) {
|
||||
const v8 = require('v8');
|
||||
|
||||
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
|
||||
|
||||
if (isMainThread) {
|
||||
const {Worker} = require('worker_threads');
|
||||
let data;
|
||||
let sync;
|
||||
let worker;
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (worker === undefined) {
|
||||
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
|
||||
data = new Uint8Array(dataBuffer);
|
||||
|
||||
const syncBuffer = new SharedArrayBuffer(4);
|
||||
sync = new Int32Array(syncBuffer);
|
||||
|
||||
worker = new Worker(__filename, {
|
||||
workerData: {
|
||||
dataBuffer,
|
||||
syncBuffer,
|
||||
firstMessage: {projectDir, overrideExtensions, overrideFiles}
|
||||
}
|
||||
});
|
||||
worker.unref();
|
||||
} else {
|
||||
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
|
||||
}
|
||||
|
||||
Atomics.wait(sync, 0, 0);
|
||||
|
||||
const byteLength = Atomics.exchange(sync, 0, 0);
|
||||
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
|
||||
throw new Error('Globs are over 100 KiB and cannot be resolved');
|
||||
}
|
||||
|
||||
const globsOrError = v8.deserialize(data.slice(0, byteLength));
|
||||
if (globsOrError instanceof Error) {
|
||||
throw globsOrError;
|
||||
}
|
||||
|
||||
return globsOrError;
|
||||
};
|
||||
} else {
|
||||
const {parentPort, workerData} = require('worker_threads');
|
||||
const data = new Uint8Array(workerData.dataBuffer);
|
||||
const sync = new Int32Array(workerData.syncBuffer);
|
||||
|
||||
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
|
||||
let encoded;
|
||||
try {
|
||||
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
|
||||
encoded = v8.serialize(globs);
|
||||
} catch (error) {
|
||||
encoded = v8.serialize(error);
|
||||
}
|
||||
|
||||
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
|
||||
Atomics.store(sync, 0, byteLength);
|
||||
Atomics.notify(sync, 0);
|
||||
};
|
||||
|
||||
parentPort.on('message', handleMessage);
|
||||
handleMessage(workerData.firstMessage);
|
||||
delete workerData.firstMessage;
|
||||
}
|
||||
}
|
||||
|
||||
const helperCache = new Map();
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
|
||||
let helperPatterns = [];
|
||||
if (overrides && overrides.helpers !== undefined) {
|
||||
|
|
@ -55,14 +167,7 @@ function load(projectDir, overrides) {
|
|||
helperPatterns = normalizePatterns(overrides.helpers);
|
||||
}
|
||||
|
||||
const globs = {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrides && overrides.files ? overrides.files : conf.files,
|
||||
providers
|
||||
})
|
||||
};
|
||||
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
|
||||
|
||||
const classifyForESLint = file => {
|
||||
const {isTest} = classify(file, globs);
|
||||
|
|
|
|||
13
node_modules/ava/index.d.ts
generated
vendored
13
node_modules/ava/index.d.ts
generated
vendored
|
|
@ -45,6 +45,9 @@ export interface Assertions {
|
|||
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
deepEqual: DeepEqualAssertion;
|
||||
|
||||
/** Assert that `actual` is like `expected`. */
|
||||
like: LikeAssertion;
|
||||
|
||||
/** Fail the test. */
|
||||
fail: FailAssertion;
|
||||
|
||||
|
|
@ -125,6 +128,14 @@ export interface DeepEqualAssertion {
|
|||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface LikeAssertion {
|
||||
/** Assert that `value` is like `selector`. */
|
||||
(value: any, selector: Record<string, any>, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(value: any, selector: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface FailAssertion {
|
||||
/** Fail the test. */
|
||||
(message?: string): void;
|
||||
|
|
@ -342,7 +353,7 @@ export interface TimeoutFn {
|
|||
* Set a timeout for the test, in milliseconds. The test will fail if the timeout is exceeded.
|
||||
* The timeout is reset each time an assertion is made.
|
||||
*/
|
||||
(ms: number): void;
|
||||
(ms: number, message?: string): void;
|
||||
}
|
||||
|
||||
export interface TeardownFn {
|
||||
|
|
|
|||
29
node_modules/ava/lib/api.js
generated
vendored
29
node_modules/ava/lib/api.js
generated
vendored
|
|
@ -17,6 +17,7 @@ const RunStatus = require('./run-status');
|
|||
const fork = require('./fork');
|
||||
const serializeError = require('./serialize-error');
|
||||
const {getApplicableLineNumbers} = require('./line-numbers');
|
||||
const sharedWorkers = require('./plugin-support/shared-workers');
|
||||
|
||||
function resolveModules(modules) {
|
||||
return arrify(modules).map(name => {
|
||||
|
|
@ -110,21 +111,15 @@ class Api extends Emittery {
|
|||
}
|
||||
};
|
||||
|
||||
let cacheDir;
|
||||
let testFiles;
|
||||
try {
|
||||
cacheDir = this._createCacheDir();
|
||||
testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
|
||||
if (selectedFiles.length === 0) {
|
||||
if (filter.length === 0) {
|
||||
selectedFiles = testFiles;
|
||||
} else {
|
||||
selectedFiles = globs.applyTestFileFilter({
|
||||
cwd: this.options.projectDir,
|
||||
filter: filter.map(({pattern}) => pattern),
|
||||
testFiles
|
||||
});
|
||||
}
|
||||
selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
|
||||
cwd: this.options.projectDir,
|
||||
filter: filter.map(({pattern}) => pattern),
|
||||
testFiles
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
selectedFiles = [];
|
||||
|
|
@ -147,7 +142,7 @@ class Api extends Emittery {
|
|||
runStatus = new RunStatus(selectedFiles.length, null);
|
||||
}
|
||||
|
||||
const debugWithoutSpecificFile = Boolean(this.options.debug) && selectedFiles.length !== 1;
|
||||
const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
|
||||
|
||||
await this.emit('run', {
|
||||
bailWithoutReporting: debugWithoutSpecificFile,
|
||||
|
|
@ -192,7 +187,7 @@ class Api extends Emittery {
|
|||
|
||||
const {providers = []} = this.options;
|
||||
const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
|
||||
const state = await main.compile({cacheDir, files: testFiles});
|
||||
const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
|
||||
return state === null ? null : {type, state};
|
||||
}))).filter(state => state !== null);
|
||||
|
||||
|
|
@ -206,6 +201,8 @@ class Api extends Emittery {
|
|||
concurrency = 1;
|
||||
}
|
||||
|
||||
const deregisteredSharedWorkers = [];
|
||||
|
||||
// Try and run each file, limited by `concurrency`.
|
||||
await pMap(selectedFiles, async file => {
|
||||
// No new files should be run once a test has timed out or failed,
|
||||
|
|
@ -231,6 +228,7 @@ class Api extends Emittery {
|
|||
|
||||
const worker = fork(file, options, apiOptions.nodeArguments);
|
||||
runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
|
||||
deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
|
||||
|
||||
pendingWorkers.add(worker);
|
||||
worker.promise.then(() => {
|
||||
|
|
@ -238,8 +236,11 @@ class Api extends Emittery {
|
|||
});
|
||||
restartTimer();
|
||||
|
||||
return worker.promise;
|
||||
await worker.promise;
|
||||
}, {concurrency, stopOnError: false});
|
||||
|
||||
// Allow shared workers to clean up before the run ends.
|
||||
await Promise.all(deregisteredSharedWorkers);
|
||||
} catch (error) {
|
||||
if (error && error.name === 'AggregateError') {
|
||||
for (const err of error) {
|
||||
|
|
|
|||
108
node_modules/ava/lib/assert.js
generated
vendored
108
node_modules/ava/lib/assert.js
generated
vendored
|
|
@ -3,11 +3,11 @@ const concordance = require('concordance');
|
|||
const isError = require('is-error');
|
||||
const isPromise = require('is-promise');
|
||||
const concordanceOptions = require('./concordance-options').default;
|
||||
const concordanceDiffOptions = require('./concordance-options').diff;
|
||||
const {CIRCULAR_SELECTOR, isLikeSelector, selectComparable} = require('./like-selector');
|
||||
const snapshotManager = require('./snapshot-manager');
|
||||
|
||||
function formatDescriptorDiff(actualDescriptor, expectedDescriptor, options) {
|
||||
options = {...options, ...concordanceDiffOptions};
|
||||
options = {...options, ...concordanceOptions};
|
||||
return {
|
||||
label: 'Difference:',
|
||||
formatted: concordance.diffDescriptors(actualDescriptor, expectedDescriptor, options)
|
||||
|
|
@ -64,6 +64,21 @@ class AssertionError extends Error {
|
|||
}
|
||||
exports.AssertionError = AssertionError;
|
||||
|
||||
function checkAssertionMessage(assertion, message) {
|
||||
if (typeof message === 'undefined' || typeof message === 'string') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return new AssertionError({
|
||||
assertion,
|
||||
improperUsage: true,
|
||||
message: 'The assertion message must be a string',
|
||||
values: [formatWithLabel('Called with:', message)]
|
||||
});
|
||||
}
|
||||
|
||||
exports.checkAssertionMessage = checkAssertionMessage;
|
||||
|
||||
function getErrorWithLongStackTrace() {
|
||||
const limitBefore = Error.stackTraceLimit;
|
||||
Error.stackTraceLimit = Infinity;
|
||||
|
|
@ -72,8 +87,16 @@ function getErrorWithLongStackTrace() {
|
|||
return err;
|
||||
}
|
||||
|
||||
function validateExpectations(assertion, expectations, numberArgs) { // eslint-disable-line complexity
|
||||
function validateExpectations(assertion, expectations, numberArgs, experiments) { // eslint-disable-line complexity
|
||||
if (numberArgs === 1 || expectations === null || expectations === undefined) {
|
||||
if (experiments.disableNullExpectations && expectations === null) {
|
||||
throw new AssertionError({
|
||||
assertion,
|
||||
message: `The second argument to \`t.${assertion}()\` must be an expectation object or \`undefined\``,
|
||||
values: [formatWithLabel('Called with:', expectations)]
|
||||
});
|
||||
}
|
||||
|
||||
expectations = {};
|
||||
} else if (
|
||||
typeof expectations === 'function' ||
|
||||
|
|
@ -242,7 +265,9 @@ class Assertions {
|
|||
fail = notImplemented,
|
||||
skip = notImplemented,
|
||||
compareWithSnapshot = notImplemented,
|
||||
powerAssert
|
||||
powerAssert,
|
||||
experiments = {},
|
||||
disableSnapshots = false
|
||||
} = {}) {
|
||||
const withSkip = assertionFn => {
|
||||
assertionFn.skip = skip;
|
||||
|
|
@ -267,22 +292,16 @@ class Assertions {
|
|||
});
|
||||
|
||||
const checkMessage = (assertion, message, powerAssert = false) => {
|
||||
if (typeof message === 'undefined' || typeof message === 'string') {
|
||||
return true;
|
||||
const result = checkAssertionMessage(assertion, message);
|
||||
if (result === true) {
|
||||
return this.true;
|
||||
}
|
||||
|
||||
const error = new AssertionError({
|
||||
assertion,
|
||||
improperUsage: true,
|
||||
message: 'The assertion message must be a string',
|
||||
values: [formatWithLabel('Called with:', message)]
|
||||
});
|
||||
|
||||
if (powerAssert) {
|
||||
throw error;
|
||||
throw result;
|
||||
}
|
||||
|
||||
fail(error);
|
||||
fail(result);
|
||||
return false;
|
||||
};
|
||||
|
||||
|
|
@ -387,6 +406,52 @@ class Assertions {
|
|||
}
|
||||
});
|
||||
|
||||
this.like = withSkip((actual, selector, message) => {
|
||||
if (!checkMessage('like', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isLikeSelector(selector)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
improperUsage: true,
|
||||
message: '`t.like()` selector must be a non-empty object',
|
||||
values: [formatWithLabel('Called with:', selector)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
let comparable;
|
||||
try {
|
||||
comparable = selectComparable(actual, selector);
|
||||
} catch (error) {
|
||||
if (error === CIRCULAR_SELECTOR) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
improperUsage: true,
|
||||
message: '`t.like()` selector must not contain circular references',
|
||||
values: [formatWithLabel('Called with:', selector)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
const result = concordance.compare(comparable, selector, concordanceOptions);
|
||||
if (result.pass) {
|
||||
pass();
|
||||
} else {
|
||||
const actualDescriptor = result.actual || concordance.describe(comparable, concordanceOptions);
|
||||
const expectedDescriptor = result.expected || concordance.describe(selector, concordanceOptions);
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
message,
|
||||
values: [formatDescriptorDiff(actualDescriptor, expectedDescriptor)]
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
this.throws = withSkip((...args) => {
|
||||
// Since arrow functions do not support 'arguments', we are using rest
|
||||
// operator, so we can determine the total number of arguments passed
|
||||
|
|
@ -408,7 +473,7 @@ class Assertions {
|
|||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throws', expectations, args.length);
|
||||
expectations = validateExpectations('throws', expectations, args.length, experiments);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return;
|
||||
|
|
@ -474,7 +539,7 @@ class Assertions {
|
|||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throwsAsync', expectations, args.length);
|
||||
expectations = validateExpectations('throwsAsync', expectations, args.length, experiments);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return Promise.resolve();
|
||||
|
|
@ -634,6 +699,15 @@ class Assertions {
|
|||
});
|
||||
|
||||
this.snapshot = withSkip((expected, ...rest) => {
|
||||
if (disableSnapshots && experiments.disableSnapshotsInHooks) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'snapshot',
|
||||
message: '`t.snapshot()` can only be used in tests',
|
||||
improperUsage: true
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
let message;
|
||||
let snapshotOptions;
|
||||
if (rest.length > 1) {
|
||||
|
|
|
|||
102
node_modules/ava/lib/cli.js
generated
vendored
102
node_modules/ava/lib/cli.js
generated
vendored
|
|
@ -7,7 +7,7 @@ const arrify = require('arrify');
|
|||
const yargs = require('yargs');
|
||||
const readPkg = require('read-pkg');
|
||||
const isCi = require('./is-ci');
|
||||
const loadConfig = require('./load-config');
|
||||
const {loadConfig} = require('./load-config');
|
||||
|
||||
function exit(message) {
|
||||
console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
|
||||
|
|
@ -83,12 +83,24 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
let confError = null;
|
||||
try {
|
||||
const {argv: {config: configFile}} = yargs.help(false);
|
||||
conf = loadConfig({configFile});
|
||||
conf = await loadConfig({configFile});
|
||||
} catch (error) {
|
||||
confError = error;
|
||||
}
|
||||
|
||||
let debug = null;
|
||||
// Enter debug mode if the main process is being inspected. This assumes the
|
||||
// worker processes are automatically inspected, too. It is not necessary to
|
||||
// run AVA with the debug command, though it's allowed.
|
||||
const activeInspector = require('inspector').url() !== undefined; // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
let debug = activeInspector ?
|
||||
{
|
||||
active: true,
|
||||
break: false,
|
||||
files: [],
|
||||
host: undefined,
|
||||
port: undefined
|
||||
} : null;
|
||||
|
||||
let resetCache = false;
|
||||
const {argv} = yargs
|
||||
.parserConfiguration({
|
||||
|
|
@ -122,7 +134,11 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
array: true,
|
||||
describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
|
||||
type: 'string'
|
||||
}))
|
||||
}), argv => {
|
||||
if (activeInspector) {
|
||||
debug.files = argv.pattern || [];
|
||||
}
|
||||
})
|
||||
.command(
|
||||
'debug [<pattern>...]',
|
||||
'Activate Node.js inspector and run a single test file',
|
||||
|
|
@ -148,6 +164,7 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
}),
|
||||
argv => {
|
||||
debug = {
|
||||
active: activeInspector,
|
||||
break: argv.break === true,
|
||||
files: argv.pattern,
|
||||
host: argv.host,
|
||||
|
|
@ -182,6 +199,10 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
|
||||
const chalk = require('./chalk').set(chalkOptions);
|
||||
|
||||
if (combined.updateSnapshots && combined.match) {
|
||||
exit('Snapshots cannot be updated when matching specific tests.');
|
||||
}
|
||||
|
||||
if (confError) {
|
||||
if (confError.parent) {
|
||||
exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
|
||||
|
|
@ -259,11 +280,11 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
|
||||
const ciParallelVars = require('ci-parallel-vars');
|
||||
const Api = require('./api');
|
||||
const VerboseReporter = require('./reporters/verbose');
|
||||
const MiniReporter = require('./reporters/mini');
|
||||
const DefaultReporter = require('./reporters/default');
|
||||
const TapReporter = require('./reporters/tap');
|
||||
const Watcher = require('./watcher');
|
||||
const normalizeExtensions = require('./extensions');
|
||||
const normalizeModuleTypes = require('./module-types');
|
||||
const {normalizeGlobs, normalizePattern} = require('./globs');
|
||||
const normalizeNodeArguments = require('./node-arguments');
|
||||
const validateEnvironmentVariables = require('./environment-variables');
|
||||
|
|
@ -281,12 +302,6 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
|
||||
const {type: defaultModuleType = 'commonjs'} = pkg || {};
|
||||
|
||||
const moduleTypes = {
|
||||
cjs: 'commonjs',
|
||||
mjs: 'module',
|
||||
js: defaultModuleType
|
||||
};
|
||||
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
try {
|
||||
|
|
@ -328,6 +343,13 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
exit(error.message);
|
||||
}
|
||||
|
||||
let moduleTypes;
|
||||
try {
|
||||
moduleTypes = normalizeModuleTypes(conf.extensions, defaultModuleType, experiments);
|
||||
} catch (error) {
|
||||
exit(error.message);
|
||||
}
|
||||
|
||||
let globs;
|
||||
try {
|
||||
globs = normalizeGlobs({files: conf.files, ignoredByWatcher: conf.ignoredByWatcher, extensions, providers});
|
||||
|
|
@ -357,6 +379,9 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
|
||||
...rest
|
||||
}));
|
||||
if (combined.updateSnapshots && filter.some(condition => condition.lineNumbers !== null)) {
|
||||
exit('Snapshots cannot be updated when selecting specific tests by their line number.');
|
||||
}
|
||||
|
||||
const api = new Api({
|
||||
cacheEnabled: combined.cache !== false,
|
||||
|
|
@ -384,32 +409,37 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
workerArgv: argv['--']
|
||||
});
|
||||
|
||||
let reporter;
|
||||
if (combined.tap && !combined.watch && debug === null) {
|
||||
reporter = new TapReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr
|
||||
});
|
||||
} else if (debug !== null || combined.verbose || isCi || !process.stdout.isTTY) {
|
||||
reporter = new VerboseReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch
|
||||
});
|
||||
} else {
|
||||
reporter = new MiniReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch
|
||||
});
|
||||
}
|
||||
const reporter = combined.tap && !combined.watch && debug === null ? new TapReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr
|
||||
}) : new DefaultReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch,
|
||||
verbose: debug !== null || combined.verbose || isCi || !process.stdout.isTTY
|
||||
});
|
||||
|
||||
api.on('run', plan => {
|
||||
reporter.startRun(plan);
|
||||
|
||||
if (process.env.AVA_EMIT_RUN_STATUS_OVER_IPC === 'I\'ll find a payphone baby / Take some time to talk to you') {
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
const bufferedSend = controlFlow(process);
|
||||
|
||||
if (process.versions.node >= '12.16.0') {
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend(evt);
|
||||
});
|
||||
} else {
|
||||
const v8 = require('v8');
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend([...v8.serialize(evt)]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (evt.type === 'interrupt') {
|
||||
reporter.endRun();
|
||||
|
|
@ -431,14 +461,14 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
} else {
|
||||
let debugWithoutSpecificFile = false;
|
||||
api.on('run', plan => {
|
||||
if (plan.debug && plan.files.length !== 1) {
|
||||
if (debug !== null && plan.files.length !== 1) {
|
||||
debugWithoutSpecificFile = true;
|
||||
}
|
||||
});
|
||||
|
||||
const runStatus = await api.run({filter});
|
||||
|
||||
if (debugWithoutSpecificFile) {
|
||||
if (debugWithoutSpecificFile && !debug.active) {
|
||||
exit('Provide the path to the test file you wish to debug');
|
||||
return;
|
||||
}
|
||||
|
|
|
|||
2
node_modules/ava/lib/code-excerpt.js
generated
vendored
2
node_modules/ava/lib/code-excerpt.js
generated
vendored
|
|
@ -19,7 +19,7 @@ module.exports = (source, options = {}) => {
|
|||
let contents;
|
||||
try {
|
||||
contents = fs.readFileSync(file, 'utf8');
|
||||
} catch (_) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
|||
3
node_modules/ava/lib/concordance-options.js
generated
vendored
3
node_modules/ava/lib/concordance-options.js
generated
vendored
|
|
@ -1,5 +1,5 @@
|
|||
'use strict';
|
||||
const util = require('util');
|
||||
const util = require('util'); // eslint-disable-line unicorn/import-style
|
||||
const ansiStyles = require('ansi-styles');
|
||||
const stripAnsi = require('strip-ansi');
|
||||
const cloneDeepWith = require('lodash/cloneDeepWith');
|
||||
|
|
@ -135,5 +135,4 @@ exports.default = {
|
|||
theme
|
||||
};
|
||||
|
||||
exports.diff = {maxDepth: 1, plugins, theme};
|
||||
exports.snapshotManager = {plugins, theme: plainTheme};
|
||||
|
|
|
|||
5
node_modules/ava/lib/extensions.js
generated
vendored
5
node_modules/ava/lib/extensions.js
generated
vendored
|
|
@ -2,8 +2,11 @@ module.exports = (configuredExtensions, providers = []) => {
|
|||
// Combine all extensions possible for testing. Remove duplicate extensions.
|
||||
const duplicates = new Set();
|
||||
const seen = new Set();
|
||||
|
||||
const normalize = extensions => Array.isArray(extensions) ? extensions : Object.keys(extensions);
|
||||
|
||||
const combine = extensions => {
|
||||
for (const ext of extensions) {
|
||||
for (const ext of normalize(extensions)) {
|
||||
if (seen.has(ext)) {
|
||||
duplicates.add(ext);
|
||||
} else {
|
||||
|
|
|
|||
102
node_modules/ava/lib/fork.js
generated
vendored
102
node_modules/ava/lib/fork.js
generated
vendored
|
|
@ -3,6 +3,7 @@ const childProcess = require('child_process');
|
|||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const Emittery = require('emittery');
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
|
||||
if (fs.realpathSync(__filename) !== __filename) {
|
||||
console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
|
||||
|
|
@ -11,10 +12,57 @@ if (fs.realpathSync(__filename) !== __filename) {
|
|||
// In case the test file imports a different AVA install,
|
||||
// the presence of this variable allows it to require this one instead
|
||||
const AVA_PATH = path.resolve(__dirname, '..');
|
||||
const WORKER_PATH = require.resolve('./worker/subprocess');
|
||||
|
||||
const workerPath = require.resolve('./worker/subprocess');
|
||||
class SharedWorkerChannel extends Emittery {
|
||||
constructor({channelId, filename, initialData}, sendToFork) {
|
||||
super();
|
||||
|
||||
this.id = channelId;
|
||||
this.filename = filename;
|
||||
this.initialData = initialData;
|
||||
this.sendToFork = sendToFork;
|
||||
}
|
||||
|
||||
signalReady() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-ready',
|
||||
channelId: this.id
|
||||
});
|
||||
}
|
||||
|
||||
signalError() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-error',
|
||||
channelId: this.id
|
||||
});
|
||||
}
|
||||
|
||||
emitMessage({messageId, replyTo, serializedData}) {
|
||||
this.emit('message', {
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
|
||||
forwardMessageToFork({messageId, replyTo, serializedData}) {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-message',
|
||||
channelId: this.id,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let forkCounter = 0;
|
||||
|
||||
module.exports = (file, options, execArgv = process.execArgv) => {
|
||||
const forkId = `fork/${++forkCounter}`;
|
||||
const sharedWorkerChannels = new Map();
|
||||
|
||||
let finished = false;
|
||||
|
||||
const emitter = new Emittery();
|
||||
|
|
@ -25,12 +73,13 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
};
|
||||
|
||||
options = {
|
||||
file,
|
||||
baseDir: process.cwd(),
|
||||
file,
|
||||
forkId,
|
||||
...options
|
||||
};
|
||||
|
||||
const subprocess = childProcess.fork(workerPath, options.workerArgv, {
|
||||
const subprocess = childProcess.fork(WORKER_PATH, options.workerArgv, {
|
||||
cwd: options.projectDir,
|
||||
silent: true,
|
||||
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
|
||||
|
|
@ -45,12 +94,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
emitStateChange({type: 'worker-stderr', chunk});
|
||||
});
|
||||
|
||||
const bufferedSend = controlFlow(subprocess);
|
||||
|
||||
let forcedExit = false;
|
||||
const send = evt => {
|
||||
if (subprocess.connected && !finished && !forcedExit) {
|
||||
subprocess.send({ava: evt}, () => {
|
||||
// Disregard errors.
|
||||
});
|
||||
if (!finished && !forcedExit) {
|
||||
bufferedSend({ava: evt});
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -65,15 +114,25 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
return;
|
||||
}
|
||||
|
||||
if (message.ava.type === 'ready-for-options') {
|
||||
send({type: 'options', options});
|
||||
return;
|
||||
}
|
||||
switch (message.ava.type) {
|
||||
case 'ready-for-options':
|
||||
send({type: 'options', options});
|
||||
break;
|
||||
case 'shared-worker-connect': {
|
||||
const channel = new SharedWorkerChannel(message.ava, send);
|
||||
sharedWorkerChannels.set(channel.id, channel);
|
||||
emitter.emit('connectSharedWorker', channel);
|
||||
break;
|
||||
}
|
||||
|
||||
if (message.ava.type === 'ping') {
|
||||
send({type: 'pong'});
|
||||
} else {
|
||||
emitStateChange(message.ava);
|
||||
case 'shared-worker-message':
|
||||
sharedWorkerChannels.get(message.ava.channelId).emitMessage(message.ava);
|
||||
break;
|
||||
case 'ping':
|
||||
send({type: 'pong'});
|
||||
break;
|
||||
default:
|
||||
emitStateChange(message.ava);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
@ -98,6 +157,10 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
});
|
||||
|
||||
return {
|
||||
file,
|
||||
forkId,
|
||||
promise,
|
||||
|
||||
exit() {
|
||||
forcedExit = true;
|
||||
subprocess.kill();
|
||||
|
|
@ -107,11 +170,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
send({type: 'peer-failed'});
|
||||
},
|
||||
|
||||
onStateChange(listener) {
|
||||
return emitter.on('stateChange', listener);
|
||||
onConnectSharedWorker(listener) {
|
||||
return emitter.on('connectSharedWorker', listener);
|
||||
},
|
||||
|
||||
file,
|
||||
promise
|
||||
onStateChange(listener) {
|
||||
return emitter.on('stateChange', listener);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
|||
6
node_modules/ava/lib/globs.js
generated
vendored
6
node_modules/ava/lib/globs.js
generated
vendored
|
|
@ -82,11 +82,7 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
|
|||
filePatterns = defaultTestPatterns;
|
||||
}
|
||||
|
||||
if (ignoredByWatcherPatterns) {
|
||||
ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)];
|
||||
} else {
|
||||
ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns];
|
||||
}
|
||||
ignoredByWatcherPatterns = ignoredByWatcherPatterns ? [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)] : [...defaultIgnoredByWatcherPatterns];
|
||||
|
||||
for (const {level, main} of providers) {
|
||||
if (level >= providerManager.levels.pathRewrites) {
|
||||
|
|
|
|||
39
node_modules/ava/lib/ipc-flow-control.js
generated
vendored
Normal file
39
node_modules/ava/lib/ipc-flow-control.js
generated
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
function controlFlow(channel) {
|
||||
let errored = false;
|
||||
let deliverImmediately = true;
|
||||
|
||||
const backlog = [];
|
||||
const deliverNext = error => {
|
||||
if (error !== null) {
|
||||
errored = true;
|
||||
}
|
||||
|
||||
if (errored || !channel.connected) {
|
||||
backlog.length = 0; // Free memory.
|
||||
return; // We can't send.
|
||||
}
|
||||
|
||||
let ok = true;
|
||||
while (ok && backlog.length > 0) { // Stop sending after backpressure.
|
||||
ok = channel.send(backlog.shift(), deliverNext);
|
||||
}
|
||||
|
||||
// Re-enable immediate delivery if there is no backpressure and the backlog
|
||||
// has been cleared.
|
||||
deliverImmediately = ok && backlog.length === 0;
|
||||
};
|
||||
|
||||
return message => {
|
||||
if (errored || !channel.connected) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (deliverImmediately) {
|
||||
deliverImmediately = channel.send(message, deliverNext);
|
||||
} else {
|
||||
backlog.push(message);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
exports.controlFlow = controlFlow;
|
||||
37
node_modules/ava/lib/like-selector.js
generated
vendored
Normal file
37
node_modules/ava/lib/like-selector.js
generated
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
'use strict';
|
||||
function isLikeSelector(selector) {
|
||||
return selector !== null &&
|
||||
typeof selector === 'object' &&
|
||||
Reflect.getPrototypeOf(selector) === Object.prototype &&
|
||||
Reflect.ownKeys(selector).length > 0;
|
||||
}
|
||||
|
||||
exports.isLikeSelector = isLikeSelector;
|
||||
|
||||
const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
|
||||
exports.CIRCULAR_SELECTOR = CIRCULAR_SELECTOR;
|
||||
|
||||
function selectComparable(lhs, selector, circular = new Set()) {
|
||||
if (circular.has(selector)) {
|
||||
throw CIRCULAR_SELECTOR;
|
||||
}
|
||||
|
||||
circular.add(selector);
|
||||
|
||||
if (lhs === null || typeof lhs !== 'object') {
|
||||
return lhs;
|
||||
}
|
||||
|
||||
const comparable = {};
|
||||
for (const [key, rhs] of Object.entries(selector)) {
|
||||
if (isLikeSelector(rhs)) {
|
||||
comparable[key] = selectComparable(Reflect.get(lhs, key), rhs, circular);
|
||||
} else {
|
||||
comparable[key] = Reflect.get(lhs, key);
|
||||
}
|
||||
}
|
||||
|
||||
return comparable;
|
||||
}
|
||||
|
||||
exports.selectComparable = selectComparable;
|
||||
4
node_modules/ava/lib/line-numbers.js
generated
vendored
4
node_modules/ava/lib/line-numbers.js
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
'use strict';
|
||||
|
||||
const micromatch = require('micromatch');
|
||||
const picomatch = require('picomatch');
|
||||
const flatten = require('lodash/flatten');
|
||||
|
||||
const NUMBER_REGEX = /^\d+$/;
|
||||
|
|
@ -56,7 +56,7 @@ exports.splitPatternAndLineNumbers = splitPatternAndLineNumbers;
|
|||
function getApplicableLineNumbers(normalizedFilePath, filter) {
|
||||
return sortNumbersAscending(distinctArray(flatten(
|
||||
filter
|
||||
.filter(({pattern, lineNumbers}) => lineNumbers && micromatch.isMatch(normalizedFilePath, pattern))
|
||||
.filter(({pattern, lineNumbers}) => lineNumbers && picomatch.isMatch(normalizedFilePath, pattern))
|
||||
.map(({lineNumbers}) => lineNumbers)
|
||||
)));
|
||||
}
|
||||
|
|
|
|||
143
node_modules/ava/lib/load-config.js
generated
vendored
143
node_modules/ava/lib/load-config.js
generated
vendored
|
|
@ -1,27 +1,48 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const url = require('url');
|
||||
const vm = require('vm');
|
||||
const isPlainObject = require('is-plain-object');
|
||||
const {isPlainObject} = require('is-plain-object');
|
||||
const pkgConf = require('pkg-conf');
|
||||
|
||||
const NO_SUCH_FILE = Symbol('no ava.config.js file');
|
||||
const MISSING_DEFAULT_EXPORT = Symbol('missing default export');
|
||||
const EXPERIMENTS = new Set();
|
||||
const EXPERIMENTS = new Set([
|
||||
'configurableModuleFormat',
|
||||
'disableNullExpectations',
|
||||
'disableSnapshotsInHooks',
|
||||
'nextGenConfig',
|
||||
'reverseTeardowns',
|
||||
'sharedWorkers'
|
||||
]);
|
||||
|
||||
// *Very* rudimentary support for loading ava.config.js files containing an `export default` statement.
|
||||
const evaluateJsConfig = configFile => {
|
||||
const contents = fs.readFileSync(configFile, 'utf8');
|
||||
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.replace(/export default/g, '__export__ =')};return __export__;})()`, {
|
||||
const evaluateJsConfig = (contents, configFile) => {
|
||||
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.toString('utf8').replace(/export default/g, '__export__ =')};return __export__;})()`, {
|
||||
filename: configFile,
|
||||
lineOffset: -1
|
||||
});
|
||||
return {
|
||||
default: script.runInThisContext()
|
||||
};
|
||||
return script.runInThisContext();
|
||||
};
|
||||
|
||||
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}) => {
|
||||
const importConfig = async ({configFile, fileForErrorMessage}) => {
|
||||
let module;
|
||||
try {
|
||||
module = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
} catch (error) {
|
||||
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
|
||||
}
|
||||
|
||||
const {default: config = MISSING_DEFAULT_EXPORT} = module;
|
||||
if (config === MISSING_DEFAULT_EXPORT) {
|
||||
throw new Error(`${fileForErrorMessage} must have a default export`);
|
||||
}
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}, useImport = false) => {
|
||||
if (!configFile.endsWith('.js')) {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -30,7 +51,10 @@ const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.confi
|
|||
|
||||
let config;
|
||||
try {
|
||||
({default: config = MISSING_DEFAULT_EXPORT} = evaluateJsConfig(configFile));
|
||||
const contents = fs.readFileSync(configFile);
|
||||
config = useImport && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig') ?
|
||||
importConfig({configFile, fileForErrorMessage}) :
|
||||
evaluateJsConfig(contents, configFile) || MISSING_DEFAULT_EXPORT;
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
|
|
@ -63,14 +87,17 @@ const loadCjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.conf
|
|||
}
|
||||
};
|
||||
|
||||
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}) => {
|
||||
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}, experimentally = false) => {
|
||||
if (!configFile.endsWith('.mjs')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const fileForErrorMessage = path.relative(projectDir, configFile);
|
||||
try {
|
||||
fs.readFileSync(configFile);
|
||||
const contents = fs.readFileSync(configFile);
|
||||
if (experimentally && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig')) {
|
||||
return {config: importConfig({configFile, fileForErrorMessage}), fileForErrorMessage};
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
|
|
@ -82,11 +109,7 @@ const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.conf
|
|||
throw new Error(`AVA cannot yet load ${fileForErrorMessage} files`);
|
||||
};
|
||||
|
||||
function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) { // eslint-disable-line complexity
|
||||
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
function resolveConfigFile(projectDir, configFile) {
|
||||
if (configFile) {
|
||||
configFile = path.resolve(configFile); // Relative to CWD
|
||||
if (path.basename(configFile) !== path.relative(projectDir, configFile)) {
|
||||
|
|
@ -98,6 +121,15 @@ function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {
|
|||
}
|
||||
}
|
||||
|
||||
return configFile;
|
||||
}
|
||||
|
||||
function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
|
||||
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
configFile = resolveConfigFile(projectDir, configFile);
|
||||
const allowConflictWithPackageJson = Boolean(configFile);
|
||||
|
||||
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
|
||||
|
|
@ -157,4 +189,79 @@ function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {
|
|||
return config;
|
||||
}
|
||||
|
||||
module.exports = loadConfig;
|
||||
exports.loadConfigSync = loadConfigSync;
|
||||
|
||||
async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
|
||||
let packageConf = await pkgConf('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
configFile = resolveConfigFile(projectDir, configFile);
|
||||
const allowConflictWithPackageJson = Boolean(configFile);
|
||||
|
||||
// TODO: Refactor resolution logic to implement https://github.com/avajs/ava/issues/2285.
|
||||
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
|
||||
loadJsConfig({projectDir, configFile}, true),
|
||||
loadCjsConfig({projectDir, configFile}),
|
||||
loadMjsConfig({projectDir, configFile}, true)
|
||||
].filter(result => result !== null);
|
||||
|
||||
if (conflicting.length > 0) {
|
||||
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
|
||||
}
|
||||
|
||||
let sawPromise = false;
|
||||
if (fileConf !== NO_SUCH_FILE) {
|
||||
if (allowConflictWithPackageJson) {
|
||||
packageConf = {};
|
||||
} else if (Object.keys(packageConf).length > 0) {
|
||||
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
|
||||
}
|
||||
|
||||
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
|
||||
sawPromise = true;
|
||||
fileConf = await fileConf;
|
||||
}
|
||||
|
||||
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
|
||||
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
|
||||
}
|
||||
|
||||
if (typeof fileConf === 'function') {
|
||||
fileConf = fileConf({projectDir});
|
||||
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
|
||||
sawPromise = true;
|
||||
fileConf = await fileConf;
|
||||
}
|
||||
|
||||
if (!isPlainObject(fileConf)) {
|
||||
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
|
||||
}
|
||||
}
|
||||
|
||||
if ('ava' in fileConf) {
|
||||
throw new Error(`Encountered ’ava’ property in ${fileForErrorMessage}; avoid wrapping the configuration`);
|
||||
}
|
||||
}
|
||||
|
||||
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
|
||||
|
||||
const {nonSemVerExperiments: experiments} = config;
|
||||
if (!isPlainObject(experiments)) {
|
||||
throw new Error(`nonSemVerExperiments from ${fileForErrorMessage} must be an object`);
|
||||
}
|
||||
|
||||
for (const key of Object.keys(experiments)) {
|
||||
if (!EXPERIMENTS.has(key)) {
|
||||
throw new Error(`nonSemVerExperiments.${key} from ${fileForErrorMessage} is not a supported experiment`);
|
||||
}
|
||||
}
|
||||
|
||||
if (sawPromise && experiments.nextGenConfig !== true) {
|
||||
throw new Error(`${fileForErrorMessage} exported a promise or an asynchronous factory function. You must enable the ’asyncConfigurationLoading’ experiment for this to work.`);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
exports.loadConfig = loadConfig;
|
||||
|
|
|
|||
75
node_modules/ava/lib/module-types.js
generated
vendored
Normal file
75
node_modules/ava/lib/module-types.js
generated
vendored
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
const requireTrueValue = value => {
|
||||
if (value !== true) {
|
||||
throw new TypeError('When specifying module types, use `true` for ’cjs’, ’mjs’ and ’js’ extensions');
|
||||
}
|
||||
};
|
||||
|
||||
const normalize = (extension, type, defaultModuleType) => {
|
||||
switch (extension) {
|
||||
case 'cjs':
|
||||
requireTrueValue(type);
|
||||
return 'commonjs';
|
||||
case 'mjs':
|
||||
requireTrueValue(type);
|
||||
return 'module';
|
||||
case 'js':
|
||||
requireTrueValue(type);
|
||||
return defaultModuleType;
|
||||
default:
|
||||
if (type !== 'commonjs' && type !== 'module') {
|
||||
throw new TypeError(`Module type for ’${extension}’ must be ’commonjs’ or ’module’`);
|
||||
}
|
||||
|
||||
return type;
|
||||
}
|
||||
};
|
||||
|
||||
const deriveFromObject = (extensionsObject, defaultModuleType) => {
|
||||
const moduleTypes = {};
|
||||
for (const [extension, type] of Object.entries(extensionsObject)) {
|
||||
moduleTypes[extension] = normalize(extension, type, defaultModuleType);
|
||||
}
|
||||
|
||||
return moduleTypes;
|
||||
};
|
||||
|
||||
const deriveFromArray = (extensions, defaultModuleType) => {
|
||||
const moduleTypes = {};
|
||||
for (const extension of extensions) {
|
||||
switch (extension) {
|
||||
case 'cjs':
|
||||
moduleTypes.cjs = 'commonjs';
|
||||
break;
|
||||
case 'mjs':
|
||||
moduleTypes.mjs = 'module';
|
||||
break;
|
||||
case 'js':
|
||||
moduleTypes.js = defaultModuleType;
|
||||
break;
|
||||
default:
|
||||
moduleTypes[extension] = 'commonjs';
|
||||
}
|
||||
}
|
||||
|
||||
return moduleTypes;
|
||||
};
|
||||
|
||||
module.exports = (configuredExtensions, defaultModuleType, experiments) => {
|
||||
if (configuredExtensions === undefined) {
|
||||
return {
|
||||
cjs: 'commonjs',
|
||||
mjs: 'module',
|
||||
js: defaultModuleType
|
||||
};
|
||||
}
|
||||
|
||||
if (Array.isArray(configuredExtensions)) {
|
||||
return deriveFromArray(configuredExtensions, defaultModuleType);
|
||||
}
|
||||
|
||||
if (!experiments.configurableModuleFormat) {
|
||||
throw new Error('You must enable the `configurableModuleFormat` experiment in order to specify module types');
|
||||
}
|
||||
|
||||
return deriveFromObject(configuredExtensions, defaultModuleType);
|
||||
};
|
||||
252
node_modules/ava/lib/plugin-support/shared-worker-loader.js
generated
vendored
Normal file
252
node_modules/ava/lib/plugin-support/shared-worker-loader.js
generated
vendored
Normal file
|
|
@ -0,0 +1,252 @@
|
|||
const {EventEmitter, on} = require('events');
|
||||
const v8 = require('v8');
|
||||
const {workerData, parentPort} = require('worker_threads');
|
||||
const pkg = require('../../package.json');
|
||||
|
||||
// Used to forward messages received over the `parentPort`. Every subscription
|
||||
// adds a listener, so do not enforce any maximums.
|
||||
const events = new EventEmitter().setMaxListeners(0);
|
||||
|
||||
// Map of active test workers, used in receiveMessages() to get a reference to
|
||||
// the TestWorker instance, and relevant release functions.
|
||||
const activeTestWorkers = new Map();
|
||||
|
||||
class TestWorker {
|
||||
constructor(id, file) {
|
||||
this.id = id;
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
teardown(fn) {
|
||||
let done = false;
|
||||
const teardownFn = async () => {
|
||||
if (done) {
|
||||
return;
|
||||
}
|
||||
|
||||
done = true;
|
||||
if (activeTestWorkers.has(this.id)) {
|
||||
activeTestWorkers.get(this.id).teardownFns.delete(teardownFn);
|
||||
}
|
||||
|
||||
await fn();
|
||||
};
|
||||
|
||||
activeTestWorkers.get(this.id).teardownFns.add(teardownFn);
|
||||
|
||||
return teardownFn;
|
||||
}
|
||||
|
||||
publish(data) {
|
||||
return publishMessage(this, data);
|
||||
}
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages(this);
|
||||
}
|
||||
}
|
||||
|
||||
class ReceivedMessage {
|
||||
constructor(testWorker, id, serializedData) {
|
||||
this.testWorker = testWorker;
|
||||
this.id = id;
|
||||
this.data = v8.deserialize(new Uint8Array(serializedData));
|
||||
}
|
||||
|
||||
reply(data) {
|
||||
return publishMessage(this.testWorker, data, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that, no matter how often it's received, we have a stable message
|
||||
// object.
|
||||
const messageCache = new WeakMap();
|
||||
|
||||
async function * receiveMessages(fromTestWorker, replyTo) {
|
||||
for await (const [message] of on(events, 'message')) {
|
||||
if (fromTestWorker !== undefined) {
|
||||
if (message.type === 'deregister-test-worker' && message.id === fromTestWorker.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'message' && message.testWorkerId !== fromTestWorker.id) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (message.type !== 'message') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo === undefined && message.replyTo !== undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo !== undefined && message.replyTo !== replyTo) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const active = activeTestWorkers.get(message.testWorkerId);
|
||||
// It is possible for a message to have been buffering for so long — perhaps
|
||||
// due to the caller waiting before iterating to the next message — that the
|
||||
// test worker has been deregistered. Ignore such messages.
|
||||
//
|
||||
// (This is really hard to write a test for, however!)
|
||||
if (active === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let received = messageCache.get(message);
|
||||
if (received === undefined) {
|
||||
received = new ReceivedMessage(active.instance, message.messageId, message.serializedData);
|
||||
messageCache.set(message, received);
|
||||
}
|
||||
|
||||
yield received;
|
||||
}
|
||||
}
|
||||
|
||||
let messageCounter = 0;
|
||||
const messageIdPrefix = `${workerData.id}/message`;
|
||||
const nextMessageId = () => `${messageIdPrefix}/${++messageCounter}`;
|
||||
|
||||
function publishMessage(testWorker, data, replyTo) {
|
||||
const id = nextMessageId();
|
||||
parentPort.postMessage({
|
||||
type: 'message',
|
||||
messageId: id,
|
||||
testWorkerId: testWorker.id,
|
||||
serializedData: [...v8.serialize(data)],
|
||||
replyTo
|
||||
});
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(testWorker, id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function broadcastMessage(data) {
|
||||
const id = nextMessageId();
|
||||
parentPort.postMessage({
|
||||
type: 'broadcast',
|
||||
messageId: id,
|
||||
serializedData: [...v8.serialize(data)]
|
||||
});
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(undefined, id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function loadFactory() {
|
||||
try {
|
||||
const mod = require(workerData.filename);
|
||||
if (typeof mod === 'function') {
|
||||
return mod;
|
||||
}
|
||||
|
||||
return mod.default;
|
||||
} catch (error) {
|
||||
if (error && (error.code === 'ERR_REQUIRE_ESM' || (error.code === 'MODULE_NOT_FOUND' && workerData.filename.startsWith('file://')))) {
|
||||
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
return factory;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
let signalAvailable = () => {
|
||||
parentPort.postMessage({type: 'available'});
|
||||
signalAvailable = () => {};
|
||||
};
|
||||
|
||||
let fatal;
|
||||
loadFactory(workerData.filename).then(factory => {
|
||||
if (typeof factory !== 'function') {
|
||||
throw new TypeError(`Missing default factory function export for shared worker plugin at ${workerData.filename}`);
|
||||
}
|
||||
|
||||
factory({
|
||||
negotiateProtocol(supported) {
|
||||
if (!supported.includes('experimental')) {
|
||||
fatal = new Error(`This version of AVA (${pkg.version}) is not compatible with shared worker plugin at ${workerData.filename}`);
|
||||
throw fatal;
|
||||
}
|
||||
|
||||
const produceTestWorker = instance => events.emit('testWorker', instance);
|
||||
|
||||
parentPort.on('message', async message => {
|
||||
if (message.type === 'register-test-worker') {
|
||||
const {id, file} = message;
|
||||
const instance = new TestWorker(id, file);
|
||||
|
||||
activeTestWorkers.set(id, {instance, teardownFns: new Set()});
|
||||
|
||||
produceTestWorker(instance);
|
||||
}
|
||||
|
||||
if (message.type === 'deregister-test-worker') {
|
||||
const {id} = message;
|
||||
const {teardownFns} = activeTestWorkers.get(id);
|
||||
activeTestWorkers.delete(id);
|
||||
|
||||
// Run possibly asynchronous release functions serially, in reverse
|
||||
// order. Any error will crash the worker.
|
||||
for await (const fn of [...teardownFns].reverse()) {
|
||||
await fn();
|
||||
}
|
||||
|
||||
parentPort.postMessage({
|
||||
type: 'deregistered-test-worker',
|
||||
id
|
||||
});
|
||||
}
|
||||
|
||||
// Wait for a turn of the event loop, to allow new subscriptions to be
|
||||
// set up in response to the previous message.
|
||||
setImmediate(() => events.emit('message', message));
|
||||
});
|
||||
|
||||
return {
|
||||
initialData: workerData.initialData,
|
||||
protocol: 'experimental',
|
||||
|
||||
ready() {
|
||||
signalAvailable();
|
||||
return this;
|
||||
},
|
||||
|
||||
broadcast(data) {
|
||||
return broadcastMessage(data);
|
||||
},
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages();
|
||||
},
|
||||
|
||||
async * testWorkers() {
|
||||
for await (const [worker] of on(events, 'testWorker')) {
|
||||
yield worker;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}).catch(error => {
|
||||
if (fatal === undefined) {
|
||||
fatal = error;
|
||||
}
|
||||
}).finally(() => {
|
||||
if (fatal !== undefined) {
|
||||
process.nextTick(() => {
|
||||
throw fatal;
|
||||
});
|
||||
}
|
||||
});
|
||||
140
node_modules/ava/lib/plugin-support/shared-workers.js
generated
vendored
Normal file
140
node_modules/ava/lib/plugin-support/shared-workers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
const events = require('events');
|
||||
const serializeError = require('../serialize-error');
|
||||
|
||||
let Worker;
|
||||
try {
|
||||
({Worker} = require('worker_threads'));
|
||||
} catch {}
|
||||
|
||||
const LOADER = require.resolve('./shared-worker-loader');
|
||||
|
||||
let sharedWorkerCounter = 0;
|
||||
const launchedWorkers = new Map();
|
||||
|
||||
const waitForAvailable = async worker => {
|
||||
for await (const [message] of events.on(worker, 'message')) {
|
||||
if (message.type === 'available') {
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function launchWorker({filename, initialData}) {
|
||||
if (launchedWorkers.has(filename)) {
|
||||
return launchedWorkers.get(filename);
|
||||
}
|
||||
|
||||
const id = `shared-worker/${++sharedWorkerCounter}`;
|
||||
const worker = new Worker(LOADER, {
|
||||
// Ensure the worker crashes for unhandled rejections, rather than allowing undefined behavior.
|
||||
execArgv: ['--unhandled-rejections=strict'],
|
||||
workerData: {
|
||||
filename,
|
||||
id,
|
||||
initialData
|
||||
}
|
||||
});
|
||||
worker.setMaxListeners(0);
|
||||
|
||||
const launched = {
|
||||
statePromises: {
|
||||
available: waitForAvailable(worker),
|
||||
error: events.once(worker, 'error').then(([error]) => error) // eslint-disable-line promise/prefer-await-to-then
|
||||
},
|
||||
exited: false,
|
||||
worker
|
||||
};
|
||||
|
||||
launchedWorkers.set(filename, launched);
|
||||
worker.once('exit', () => {
|
||||
launched.exited = true;
|
||||
});
|
||||
|
||||
return launched;
|
||||
}
|
||||
|
||||
async function observeWorkerProcess(fork, runStatus) {
|
||||
let registrationCount = 0;
|
||||
let signalDeregistered;
|
||||
const deregistered = new Promise(resolve => {
|
||||
signalDeregistered = resolve;
|
||||
});
|
||||
|
||||
fork.promise.finally(() => {
|
||||
if (registrationCount === 0) {
|
||||
signalDeregistered();
|
||||
}
|
||||
});
|
||||
|
||||
fork.onConnectSharedWorker(async channel => {
|
||||
const launched = launchWorker(channel);
|
||||
|
||||
const handleChannelMessage = ({messageId, replyTo, serializedData}) => {
|
||||
launched.worker.postMessage({
|
||||
type: 'message',
|
||||
testWorkerId: fork.forkId,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
};
|
||||
|
||||
const handleWorkerMessage = async message => {
|
||||
if (message.type === 'broadcast' || (message.type === 'message' && message.testWorkerId === fork.forkId)) {
|
||||
const {messageId, replyTo, serializedData} = message;
|
||||
channel.forwardMessageToFork({messageId, replyTo, serializedData});
|
||||
}
|
||||
|
||||
if (message.type === 'deregistered-test-worker' && message.id === fork.forkId) {
|
||||
launched.worker.off('message', handleWorkerMessage);
|
||||
|
||||
registrationCount--;
|
||||
if (registrationCount === 0) {
|
||||
signalDeregistered();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
launched.statePromises.error.then(error => { // eslint-disable-line promise/prefer-await-to-then
|
||||
signalDeregistered();
|
||||
launched.worker.off('message', handleWorkerMessage);
|
||||
runStatus.emitStateChange({type: 'shared-worker-error', err: serializeError('Shared worker error', true, error)});
|
||||
channel.signalError();
|
||||
});
|
||||
|
||||
try {
|
||||
await launched.statePromises.available;
|
||||
|
||||
registrationCount++;
|
||||
launched.worker.postMessage({
|
||||
type: 'register-test-worker',
|
||||
id: fork.forkId,
|
||||
file: fork.file
|
||||
});
|
||||
|
||||
fork.promise.finally(() => {
|
||||
launched.worker.postMessage({
|
||||
type: 'deregister-test-worker',
|
||||
id: fork.forkId
|
||||
});
|
||||
|
||||
channel.off('message', handleChannelMessage);
|
||||
});
|
||||
|
||||
launched.worker.on('message', handleWorkerMessage);
|
||||
channel.on('message', handleChannelMessage);
|
||||
channel.signalReady();
|
||||
} catch {
|
||||
return;
|
||||
} finally {
|
||||
// Attaching listeners has the side-effect of referencing the worker.
|
||||
// Explicitly unreference it now so it does not prevent the main process
|
||||
// from exiting.
|
||||
launched.worker.unref();
|
||||
}
|
||||
});
|
||||
|
||||
return deregistered;
|
||||
}
|
||||
|
||||
exports.observeWorkerProcess = observeWorkerProcess;
|
||||
2
node_modules/ava/lib/provider-manager.js
generated
vendored
2
node_modules/ava/lib/provider-manager.js
generated
vendored
|
|
@ -21,7 +21,7 @@ function load(providerModule, projectDir) {
|
|||
let level;
|
||||
const provider = makeProvider({
|
||||
negotiateProtocol(identifiers, {version}) {
|
||||
const [identifier] = identifiers.filter(identifier => Reflect.has(levelsByProtocol, identifier));
|
||||
const identifier = identifiers.find(identifier => Reflect.has(levelsByProtocol, identifier));
|
||||
|
||||
if (identifier === undefined) {
|
||||
fatal = new Error(`This version of AVA (${ava.version}) is not compatible with ${providerModule}@${version}`);
|
||||
|
|
|
|||
920
node_modules/ava/lib/reporters/default.js
generated
vendored
Normal file
920
node_modules/ava/lib/reporters/default.js
generated
vendored
Normal file
|
|
@ -0,0 +1,920 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const cliCursor = require('cli-cursor');
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const ora = require('ora');
|
||||
const plur = require('plur');
|
||||
const prettyMs = require('pretty-ms');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
class LineWriter extends stream.Writable {
|
||||
constructor(dest) {
|
||||
super();
|
||||
|
||||
this.dest = dest;
|
||||
this.columns = dest.columns || 80;
|
||||
this.lastLineIsEmpty = false;
|
||||
}
|
||||
|
||||
_write(chunk, _, callback) {
|
||||
this.dest.write(chunk);
|
||||
callback();
|
||||
}
|
||||
|
||||
writeLine(string) {
|
||||
if (string) {
|
||||
this.write(indentString(string, 2) + os.EOL);
|
||||
this.lastLineIsEmpty = false;
|
||||
} else {
|
||||
this.write(os.EOL);
|
||||
this.lastLineIsEmpty = true;
|
||||
}
|
||||
}
|
||||
|
||||
ensureEmptyLine() {
|
||||
if (!this.lastLineIsEmpty) {
|
||||
this.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// LineWriter that cooperates with an ora spinner: every write clears the
// spinner first, and the final line of a write is rendered through the
// spinner itself so it stays on the terminal's last row.
class LineWriterWithSpinner extends LineWriter {
	constructor(dest, spinner) {
		super(dest);

		this.lastSpinnerText = '';
		this.spinner = spinner;
	}

	_write(chunk, _, callback) {
		// Discard the current spinner output; lines that should be kept
		// must be rewritten by the caller.
		this.spinner.clear();
		this._writeWithSpinner(chunk.toString('utf8'));

		callback();
	}

	_writev(pieces, callback) {
		// Discard the current spinner output. Any lines that were meant to be
		// preserved should be rewritten.
		this.spinner.clear();

		const lastIndex = pieces.length - 1;
		for (let index = 0; index < lastIndex; index++) {
			this.dest.write(pieces[index].chunk);
		}

		this._writeWithSpinner(pieces[lastIndex].chunk.toString('utf8'));
		callback();
	}

	_writeWithSpinner(string) {
		if (!this.spinner.isSpinning) {
			this.dest.write(string);
			return;
		}

		this.lastSpinnerText = string;
		// Ignore whitespace at the end of the chunk: we're continuously
		// rewriting the last line through the spinner. Also strip the
		// two-column indent, as the spinner adds its own prefix.
		this.spinner.text = string.trimEnd().slice(2);
		this.spinner.render();
	}
}
|
||||
|
||||
// Wrap writer functions so all of their output is buffered (corked) on
// `stream` and flushed in one go when they return. Also supports writers
// that must temporarily flush while an outer corked writer is running.
function manageCorking(stream) {
	let isCorked = false;

	const beginCork = () => {
		isCorked = true;
		stream.cork();
	};

	const endCork = () => {
		isCorked = false;
		stream.uncork();
	};

	return {
		// Wrap `fn` so the stream is uncorked around its invocation when an
		// enclosing decorated writer has corked it. `isCorked` is re-read in
		// the finally block on purpose: `fn` may have changed the state.
		decorateFlushingWriter(fn) {
			return function (...args) {
				if (isCorked) {
					stream.uncork();
				}

				try {
					return fn.apply(this, args);
				} finally {
					if (isCorked) {
						stream.cork();
					}
				}
			};
		},

		// Wrap `fn` so the stream is corked for the duration of the call and
		// uncorked (flushed) afterwards, even if `fn` throws.
		decorateWriter(fn) {
			return function (...args) {
				beginCork();
				try {
					return fn.apply(this, args);
				} finally {
					endCork();
				}
			};
		}
	};
}
|
||||
|
||||
/**
 * Default reporter. Consumes state-change events from a run plan and
 * renders progress, failures and a final summary to `reportStream`.
 * In verbose mode output is line-based; otherwise an ora spinner shows
 * live pass/fail counts on the terminal's last row.
 */
class Reporter {
	constructor({
		verbose,
		reportStream,
		stdStream,
		projectDir,
		watching,
		spinner,
		durationThreshold
	}) {
		this.verbose = verbose;
		this.reportStream = reportStream;
		this.stdStream = stdStream;
		this.watching = watching;
		// Render absolute test-file paths relative to the project root.
		this.relativeFile = file => path.relative(projectDir, file);

		// Cork the report stream while an event is processed so each event's
		// output is flushed as a single write.
		const {decorateWriter, decorateFlushingWriter} = manageCorking(this.reportStream);
		this.consumeStateChange = decorateWriter(this.consumeStateChange);
		this.endRun = decorateWriter(this.endRun);

		if (this.verbose) {
			// Durations above this threshold (milliseconds) are printed next
			// to passing tests.
			this.durationThreshold = durationThreshold || 100;
			this.spinner = null;
			this.clearSpinner = () => {};
			this.lineWriter = new LineWriter(this.reportStream);
		} else {
			this.spinner = ora({
				isEnabled: true,
				color: spinner ? spinner.color : 'gray',
				discardStdin: !watching,
				hideCursor: false,
				spinner: spinner || (process.platform === 'win32' ? 'line' : 'dots'),
				stream: reportStream
			});
			// Clearing the spinner must flush any corked output first.
			this.clearSpinner = decorateFlushingWriter(this.spinner.clear.bind(this.spinner));
			this.lineWriter = new LineWriterWithSpinner(this.reportStream, this.spinner);
		}

		this.reset();
	}

	// Reset all per-run bookkeeping. Called from the constructor and at the
	// start of every run (relevant in watch mode).
	reset() {
		if (this.removePreviousListener) {
			this.removePreviousListener();
		}

		this.prefixTitle = (testFile, title) => title;

		this.runningTestFiles = new Map();
		this.filesWithMissingAvaImports = new Set();
		this.filesWithoutDeclaredTests = new Set();
		this.filesWithoutMatchedLineNumbers = new Set();

		this.failures = [];
		this.internalErrors = [];
		this.knownFailures = [];
		this.lineNumberErrors = [];
		this.sharedWorkerErrors = [];
		this.uncaughtExceptions = [];
		this.unhandledRejections = [];
		this.unsavedSnapshots = [];

		this.previousFailures = 0;

		this.failFastEnabled = false;
		this.lastLineIsEmpty = false;
		this.matching = false;

		this.removePreviousListener = null;
		this.stats = null;
	}

	// Begin reporting a run: reset state, subscribe to state changes and
	// start the spinner (non-verbose mode only).
	startRun(plan) {
		if (plan.bailWithoutReporting) {
			return;
		}

		this.reset();

		this.failFastEnabled = plan.failFastEnabled;
		this.matching = plan.matching;
		this.previousFailures = plan.previousFailures;
		this.emptyParallelRun = plan.status.emptyParallelRun;

		// Only prefix test titles with their file when there is ambiguity.
		if (this.watching || plan.files.length > 1) {
			this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
		}

		this.removePreviousListener = plan.status.on('stateChange', evt => {
			this.consumeStateChange(evt);
		});

		// Separate successive watch-mode runs with a horizontal rule.
		if (this.watching && plan.runVector > 1) {
			this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
		}

		if (this.spinner === null) {
			this.lineWriter.writeLine();
		} else {
			cliCursor.hide(this.reportStream);
			this.lineWriter.writeLine();
			this.spinner.start();
		}
	}

	// Dispatch a single state-change event to the appropriate rendering.
	consumeStateChange(event) { // eslint-disable-line complexity
		// Per-file stats are only available once the 'stats' event arrived.
		const fileStats = this.stats && event.testFile ? this.stats.byFile.get(event.testFile) : null;

		switch (event.type) { // eslint-disable-line default-case
			case 'hook-failed': {
				this.failures.push(event);
				this.writeTestSummary(event);
				break;
			}

			case 'stats': {
				this.stats = event.stats;
				break;
			}

			case 'test-failed': {
				this.failures.push(event);
				this.writeTestSummary(event);
				break;
			}

			case 'test-passed': {
				// A passing test marked as known-failing is reported later.
				if (event.knownFailing) {
					this.knownFailures.push(event);
				}

				this.writeTestSummary(event);
				break;
			}

			case 'timeout': {
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
				this.lineWriter.writeLine('');
				this.writePendingTests(event);
				break;
			}

			case 'interrupt': {
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
				this.lineWriter.writeLine('');
				this.writePendingTests(event);
				break;
			}

			case 'internal-error': {
				this.internalErrors.push(event);

				if (event.testFile) {
					this.write(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
				} else {
					this.write(colors.error(`${figures.cross} Internal error`));
				}

				// Details are printed immediately in verbose mode; otherwise
				// they are repeated in endRun().
				if (this.verbose) {
					this.lineWriter.writeLine(colors.stack(event.err.summary));
					this.lineWriter.writeLine(colors.errorStack(event.err.stack));
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				break;
			}

			case 'line-number-selection-error': {
				this.lineNumberErrors.push(event);

				this.write(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection`));
				break;
			}

			case 'missing-ava-import': {
				this.filesWithMissingAvaImports.add(event.testFile);

				this.write(colors.error(`${figures.cross} No tests found in ${this.relativeFile(event.testFile)}, make sure to import "ava" at the top of your test file`));
				break;
			}

			case 'hook-finished': {
				// Hooks are only surfaced when they produced log output.
				if (this.verbose && event.logs.length > 0) {
					this.lineWriter.writeLine(` ${this.prefixTitle(event.testFile, event.title)}`);
					this.writeLogs(event);
				}

				break;
			}

			case 'selected-test': {
				// Skipped/todo tests are listed as they are selected.
				if (this.verbose) {
					if (event.skip) {
						this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
					} else if (event.todo) {
						this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
					}
				}

				break;
			}

			case 'shared-worker-error': {
				this.sharedWorkerErrors.push(event);

				if (this.verbose) {
					this.lineWriter.ensureEmptyLine();
					this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
					this.lineWriter.writeLine();
					this.writeErr(event);
				}

				break;
			}

			case 'snapshot-error':
				this.unsavedSnapshots.push(event);
				break;

			case 'uncaught-exception': {
				this.uncaughtExceptions.push(event);

				if (this.verbose) {
					this.lineWriter.ensureEmptyLine();
					this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
					this.lineWriter.writeLine();
					this.writeErr(event);
				}

				break;
			}

			case 'unhandled-rejection': {
				this.unhandledRejections.push(event);

				if (this.verbose) {
					this.lineWriter.ensureEmptyLine();
					this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
					this.lineWriter.writeLine();
					this.writeErr(event);
				}

				break;
			}

			case 'worker-failed': {
				// NOTE(review): fileStats may be null if no 'stats' event was
				// seen for this file — confirm upstream always emits it first.
				if (fileStats.declaredTests === 0) {
					this.filesWithoutDeclaredTests.add(event.testFile);
				}

				if (this.verbose && !this.filesWithMissingAvaImports.has(event.testFile)) {
					if (event.nonZeroExitCode) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited with a non-zero exit code: ${event.nonZeroExitCode}`));
					} else {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to ${event.signal}`));
					}
				}

				break;
			}

			case 'worker-finished': {
				if (!event.forcedExit && !this.filesWithMissingAvaImports.has(event.testFile)) {
					if (fileStats.declaredTests === 0) {
						this.filesWithoutDeclaredTests.add(event.testFile);

						this.write(colors.error(`${figures.cross} No tests found in ${this.relativeFile(event.testFile)}`));
					} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
						this.filesWithoutMatchedLineNumbers.add(event.testFile);

						this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(event.testFile)} did not match any tests`));
					} else if (this.verbose && !this.failFastEnabled && fileStats.remainingTests > 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(event.testFile)}`));
					}
				}

				break;
			}

			case 'worker-stderr': {
				// Forcibly clear the spinner, writing the chunk corrupts the TTY.
				this.clearSpinner();

				this.stdStream.write(event.chunk);
				// If the chunk does not end with a linebreak, *forcibly* write one to
				// ensure it remains visible in the TTY.
				// Tests cannot assume their standard output is not interrupted. Indeed
				// we multiplex stdout and stderr into a single stream. However as
				// long as stdStream is different from reportStream users can read
				// their original output by redirecting the streams.
				if (event.chunk[event.chunk.length - 1] !== 0x0A) {
					this.reportStream.write(os.EOL);
				}

				// Re-render the last spinner line that the chunk displaced.
				if (this.spinner !== null) {
					this.lineWriter.write(this.lineWriter.lastSpinnerText);
				}

				break;
			}

			case 'worker-stdout': {
				// Forcibly clear the spinner, writing the chunk corrupts the TTY.
				this.clearSpinner();

				this.stdStream.write(event.chunk);
				// If the chunk does not end with a linebreak, *forcibly* write one to
				// ensure it remains visible in the TTY.
				// Tests cannot assume their standard output is not interrupted. Indeed
				// we multiplex stdout and stderr into a single stream. However as
				// long as stdStream is different from reportStream users can read
				// their original output by redirecting the streams.
				if (event.chunk[event.chunk.length - 1] !== 0x0A) {
					this.reportStream.write(os.EOL);
				}

				if (this.spinner !== null) {
					this.lineWriter.write(this.lineWriter.lastSpinnerText);
				}
			}
		}
	}

	// List tests that were still pending when the run timed out or was
	// interrupted, grouped per file.
	writePendingTests(evt) {
		for (const [file, testsInFile] of evt.pendingTests) {
			if (testsInFile.size === 0) {
				continue;
			}

			this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
			for (const title of testsInFile) {
				this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
			}

			this.lineWriter.writeLine('');
		}
	}

	// Write a line; in non-verbose mode the running pass/fail counts are
	// appended so the spinner line stays informative.
	write(string) {
		if (this.verbose) {
			this.lineWriter.writeLine(string);
		} else {
			this.writeWithCounts(string);
		}
	}

	// Write `string` followed by the current cumulative counts (passed,
	// known failures, failed hooks/tests, skipped, todo).
	writeWithCounts(string) {
		if (!this.stats) {
			return this.lineWriter.writeLine(string);
		}

		string = string || '';
		if (string !== '') {
			string += os.EOL;
		}

		// In watch mode the first summary line carries a timestamp.
		let firstLinePostfix = this.watching ? ' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') : '';

		if (this.stats.passedTests > 0) {
			string += os.EOL + colors.pass(`${this.stats.passedTests} passed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.passedKnownFailingTests > 0) {
			string += os.EOL + colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`);
		}

		if (this.stats.failedHooks > 0) {
			string += os.EOL + colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.failedTests > 0) {
			string += os.EOL + colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.skippedTests > 0) {
			string += os.EOL + colors.skip(`${this.stats.skippedTests} skipped`);
		}

		if (this.stats.todoTests > 0) {
			string += os.EOL + colors.todo(`${this.stats.todoTests} todo`);
		}

		this.lineWriter.writeLine(string);
	}

	// Render a serialized error: TypeScript diagnostics, source excerpt,
	// assertion details, improper-usage hints and the (beautified) stack.
	writeErr(event) {
		// ts-node errors carry the compiler diagnostics; show those verbatim.
		if (event.err.name === 'TSError' && event.err.object && event.err.object.diagnosticText) {
			this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(event.err.object.diagnosticText)));
			this.lineWriter.writeLine();
			return;
		}

		if (event.err.source) {
			this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(event.err.source.file)}:${event.err.source.line}`));
			const excerpt = codeExcerpt(event.err.source, {maxWidth: this.reportStream.columns - 2});
			if (excerpt) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(excerpt);
				this.lineWriter.writeLine();
			}
		}

		if (event.err.avaAssertionError) {
			const result = formatSerializedError(event.err);
			if (result.printMessage) {
				this.lineWriter.writeLine(event.err.message);
				this.lineWriter.writeLine();
			}

			if (result.formatted) {
				this.lineWriter.writeLine(result.formatted);
				this.lineWriter.writeLine();
			}

			const message = improperUsageMessages.forError(event.err);
			if (message) {
				this.lineWriter.writeLine(message);
				this.lineWriter.writeLine();
			}
		} else if (event.err.nonErrorObject) {
			this.lineWriter.writeLine(trimOffNewlines(event.err.formatted));
			this.lineWriter.writeLine();
		} else {
			this.lineWriter.writeLine(event.err.summary);
			this.lineWriter.writeLine();
		}

		const formatted = this.formatErrorStack(event.err);
		if (formatted.length > 0) {
			this.lineWriter.writeLine(formatted.join('\n'));
			this.lineWriter.writeLine();
		}
	}

	// Return the error stack as an array of display lines, dimming frames
	// that point into Node.js internals.
	formatErrorStack(error) {
		if (!error.stack) {
			return [];
		}

		if (error.shouldBeautifyStack) {
			return beautifyStack(error.stack).map(line => {
				if (nodeInternals.some(internal => internal.test(line))) {
					return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
				}

				return colors.errorStack(`${figures.pointerSmall} ${line}`);
			});
		}

		return [error.stack];
	}

	// Print `t.log()` output for an event. Returns true if anything was
	// written (used by writeFailure to decide on spacing).
	writeLogs(event, surroundLines) {
		if (event.logs && event.logs.length > 0) {
			if (surroundLines) {
				this.lineWriter.writeLine();
			}

			for (const log of event.logs) {
				const logLines = indentString(colors.log(log), 4);
				// Replace the first indent with an information marker.
				const logLinesWithLeadingFigure = logLines.replace(/^ {4}/, `  ${colors.information(figures.info)} `);
				this.lineWriter.writeLine(logLinesWithLeadingFigure);
			}

			if (surroundLines) {
				this.lineWriter.writeLine();
			}

			return true;
		}

		return false;
	}

	// One-line summary for a finished test or failed hook.
	writeTestSummary(event) {
		if (event.type === 'hook-failed' || event.type === 'test-failed') {
			if (this.verbose) {
				this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
			} else {
				this.write(this.prefixTitle(event.testFile, event.title));
			}
		} else if (event.knownFailing) {
			if (this.verbose) {
				this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
			} else {
				this.write(colors.error(this.prefixTitle(event.testFile, event.title)));
			}
		} else if (this.verbose) {
			// Only show the duration when it exceeds the threshold.
			const duration = event.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(event.duration) + ')') : '';
			this.write(`${colors.pass(figures.tick)} ${this.prefixTitle(event.testFile, event.title)}${duration}`);
		} else {
			this.write(this.prefixTitle(event.testFile, event.title));
		}

		if (this.verbose) {
			this.writeLogs(event);
		}
	}

	// Detailed failure output: title, logs and the error itself.
	writeFailure(event) {
		this.lineWriter.writeLine(colors.title(this.prefixTitle(event.testFile, event.title)));
		if (!this.writeLogs(event, true)) {
			this.lineWriter.writeLine();
		}

		this.writeErr(event);
	}

	// Final summary once the run completes: stops the spinner, then prints
	// per-category problems followed by the aggregate counts.
	endRun() {// eslint-disable-line complexity
		let firstLinePostfix = this.watching ? ` ${chalk.gray.dim(`[${new Date().toLocaleTimeString('en-US', {hour12: false})}]`)}` : '';
		let wroteSomething = false;

		if (!this.verbose) {
			this.spinner.stop();
			cliCursor.show(this.reportStream);
		} else if (this.emptyParallelRun) {
			this.lineWriter.writeLine('No files tested in this parallel run');
			this.lineWriter.writeLine();
			return;
		}

		if (!this.stats) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test` + firstLinePostfix));
			this.lineWriter.writeLine();
			return;
		}

		if (this.matching && this.stats.selectedTests === 0) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests` + firstLinePostfix));
			this.lineWriter.writeLine();
			return;
		}

		if (this.verbose) {
			this.lineWriter.writeLine(colors.log(figures.line));
			this.lineWriter.writeLine();
		} else {
			// Non-verbose mode repeats the per-file problems here, since the
			// spinner overwrote them during the run.
			if (this.filesWithMissingAvaImports.size > 0) {
				for (const testFile of this.filesWithMissingAvaImports) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
					firstLinePostfix = '';
					wroteSomething = true;
				}
			}

			if (this.filesWithoutDeclaredTests.size > 0) {
				for (const testFile of this.filesWithoutDeclaredTests) {
					if (!this.filesWithMissingAvaImports.has(testFile)) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
						firstLinePostfix = '';
						wroteSomething = true;
					}
				}
			}

			if (this.lineNumberErrors.length > 0) {
				for (const event of this.lineNumberErrors) {
					this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection` + firstLinePostfix));
					firstLinePostfix = '';
					wroteSomething = true;
				}
			}

			if (this.filesWithoutMatchedLineNumbers.size > 0) {
				for (const testFile of this.filesWithoutMatchedLineNumbers) {
					if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
						firstLinePostfix = '';
						wroteSomething = true;
					}
				}
			}

			if (wroteSomething) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(colors.log(figures.line));
				this.lineWriter.writeLine();
				wroteSomething = false;
			}
		}

		if (this.failures.length > 0) {
			const writeTrailingLines = this.internalErrors.length > 0 || this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

			const lastFailure = this.failures[this.failures.length - 1];
			for (const event of this.failures) {
				this.writeFailure(event);
				if (event !== lastFailure) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				} else if (!this.verbose && writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				wroteSomething = true;
			}

			if (this.verbose) {
				this.lineWriter.writeLine(colors.log(figures.line));
				this.lineWriter.writeLine();
			}
		}

		if (!this.verbose) {
			// Errors below were already printed live in verbose mode.
			if (this.internalErrors.length > 0) {
				const writeTrailingLines = this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

				const last = this.internalErrors[this.internalErrors.length - 1];
				for (const event of this.internalErrors) {
					if (event.testFile) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
					} else {
						this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
					}

					this.lineWriter.writeLine(colors.stack(event.err.summary));
					this.lineWriter.writeLine(colors.errorStack(event.err.stack));
					if (event !== last || writeTrailingLines) {
						this.lineWriter.writeLine();
						this.lineWriter.writeLine();
						this.lineWriter.writeLine();
					}

					wroteSomething = true;
				}
			}

			if (this.sharedWorkerErrors.length > 0) {
				const writeTrailingLines = this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

				const last = this.sharedWorkerErrors[this.sharedWorkerErrors.length - 1];
				for (const evt of this.sharedWorkerErrors) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
					this.lineWriter.writeLine();
					// NOTE(review): writeErr() expects the full event (it reads
					// event.err); passing evt.err looks wrong — confirm against
					// the 'shared-worker-error' branch above, which passes the
					// event itself.
					this.writeErr(evt.err);
					if (evt !== last || writeTrailingLines) {
						this.lineWriter.writeLine();
						this.lineWriter.writeLine();
					}

					wroteSomething = true;
				}
			}

			if (this.uncaughtExceptions.length > 0) {
				const writeTrailingLines = this.unhandledRejections.length > 0;

				const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
				for (const event of this.uncaughtExceptions) {
					this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
					this.lineWriter.writeLine();
					this.writeErr(event);
					if (event !== last || writeTrailingLines) {
						this.lineWriter.writeLine();
						this.lineWriter.writeLine();
					}

					wroteSomething = true;
				}
			}

			if (this.unhandledRejections.length > 0) {
				const last = this.unhandledRejections[this.unhandledRejections.length - 1];
				for (const event of this.unhandledRejections) {
					this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
					this.lineWriter.writeLine();
					this.writeErr(event);
					if (event !== last) {
						this.lineWriter.writeLine();
						this.lineWriter.writeLine();
					}

					wroteSomething = true;
				}
			}

			if (wroteSomething) {
				this.lineWriter.writeLine(colors.log(figures.line));
				this.lineWriter.writeLine();
			}
		}

		if (this.unsavedSnapshots.length > 0) {
			this.lineWriter.writeLine(colors.title('Could not update snapshots for the following test files:'));
			this.lineWriter.writeLine();
			for (const event of this.unsavedSnapshots) {
				this.lineWriter.writeLine(`${figures.warning} ${this.relativeFile(event.testFile)}`);
			}

			this.lineWriter.writeLine();
		}

		// When --fail-fast aborted the run, explain what was skipped.
		if (this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers)) {
			let remaining = '';
			if (this.stats.remainingTests > 0) {
				remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
				if (this.stats.files > this.stats.finishedWorkers) {
					remaining += ', as well as ';
				}
			}

			if (this.stats.files > this.stats.finishedWorkers) {
				const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
				remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
				if (this.stats.remainingTests === 0) {
					remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
				}
			}

			this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
			if (this.verbose) {
				this.lineWriter.writeLine();
			}
		}

		if (this.verbose && this.stats.parallelRuns) {
			const {
				currentFileCount,
				currentIndex,
				totalRuns
			} = this.stats.parallelRuns;
			this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
			this.lineWriter.writeLine();
		}

		// Aggregate counts; the watch-mode timestamp attaches to the first
		// line only.
		if (this.stats.failedHooks > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.failedTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (
			this.stats.failedHooks === 0 &&
			this.stats.failedTests === 0 &&
			this.stats.passedTests > 0
		) {
			this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix
			);
			firstLinePostfix = '';
		}

		if (this.stats.passedKnownFailingTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
		}

		if (this.stats.skippedTests > 0) {
			this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
		}

		if (this.stats.todoTests > 0) {
			this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
		}

		if (this.stats.unhandledRejections > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
		}

		if (this.stats.uncaughtExceptions > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
		}

		if (this.previousFailures > 0) {
			this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
		}

		if (this.watching) {
			this.lineWriter.writeLine();
		}
	}
}
|
||||
// Expose the reporter class as this module's sole export.
module.exports = Reporter;
|
||||
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
|
|
@ -1,619 +0,0 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const cliCursor = require('cli-cursor');
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const ora = require('ora');
|
||||
const plur = require('plur');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
// Writable stream used by the mini reporter. All output is funnelled
// through an ora spinner so the last terminal row can be continuously
// rewritten while earlier lines are preserved.
class LineWriter extends stream.Writable {
	constructor(dest, spinner) {
		super();

		this.dest = dest;
		this.columns = dest.columns || 80;
		this.spinner = spinner;
		this.lastSpinnerText = '';
	}

	_write(chunk, encoding, callback) {
		// Discard the current spinner output. Any lines that were meant to be
		// preserved should be rewritten.
		this.spinner.clear();

		this._writeWithSpinner(chunk.toString('utf8'));
		callback();
	}

	_writev(pieces, callback) {
		// Discard the current spinner output. Any lines that were meant to be
		// preserved should be rewritten.
		this.spinner.clear();

		const lastIndex = pieces.length - 1;
		for (let index = 0; index < lastIndex; index++) {
			this.dest.write(pieces[index].chunk);
		}

		this._writeWithSpinner(pieces[lastIndex].chunk.toString('utf8'));
		callback();
	}

	_writeWithSpinner(string) {
		// Without an active spinner interval, write directly.
		if (!this.spinner.id) {
			this.dest.write(string);
			return;
		}

		this.lastSpinnerText = string;
		// Trailing whitespace is dropped and the two-column indent removed,
		// since the spinner supplies its own prefix.
		this.spinner.text = string.trimEnd().slice(2);
		this.spinner.render();
	}

	writeLine(string) {
		this.write(string ? indentString(string, 2) + os.EOL : os.EOL);
	}
}
|
||||
|
||||
// Interactive "mini" reporter: while the run is in progress it shows a spinner
// plus rolling pass/fail counts, then prints full failure details in endRun().
class MiniReporter {
	// options: {reportStream, stdStream, watching, spinner, projectDir}.
	constructor(options) {
		this.reportStream = options.reportStream;
		this.stdStream = options.stdStream;
		this.watching = options.watching;

		this.spinner = ora({
			isEnabled: true,
			color: options.spinner ? options.spinner.color : 'gray',
			discardStdin: !options.watching,
			hideCursor: false,
			spinner: options.spinner || (process.platform === 'win32' ? 'line' : 'dots'),
			stream: options.reportStream
		});
		this.lineWriter = new LineWriter(this.reportStream, this.spinner);

		// Cork both streams while handling an event so each event's output is
		// flushed as one chunk and cannot interleave with the spinner.
		this.consumeStateChange = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.consumeStateChange));
		this.endRun = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.endRun));
		this.relativeFile = file => path.relative(options.projectDir, file);

		this.reset();
	}

	// Clears all per-run accumulators; called from the constructor and at the
	// start of every (watch-mode) run.
	reset() {
		if (this.removePreviousListener) {
			this.removePreviousListener();
		}

		this.failFastEnabled = false;
		this.failures = [];
		this.filesWithMissingAvaImports = new Set();
		this.filesWithoutDeclaredTests = new Set();
		this.filesWithoutMatchedLineNumbers = new Set();
		this.internalErrors = [];
		this.knownFailures = [];
		this.lineNumberErrors = [];
		this.matching = false;
		this.prefixTitle = (testFile, title) => title;
		this.previousFailures = 0;
		this.removePreviousListener = null;
		this.stats = null;
		this.uncaughtExceptions = [];
		this.unhandledRejections = [];
	}

	// Subscribes to the run plan's status events and starts the spinner.
	startRun(plan) {
		if (plan.bailWithoutReporting) {
			return;
		}

		this.reset();

		this.failFastEnabled = plan.failFastEnabled;
		this.matching = plan.matching;
		this.previousFailures = plan.previousFailures;

		// Only prefix titles with the file path when it is ambiguous which
		// file a test came from.
		if (this.watching || plan.files.length > 1) {
			this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
		}

		this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));

		// In watch mode, draw a horizontal rule between consecutive runs.
		if (this.watching && plan.runVector > 1) {
			this.reportStream.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
		}

		cliCursor.hide(this.reportStream);
		this.lineWriter.writeLine();

		this.spinner.start();
	}

	// Dispatches a single status event, updating accumulators and the live
	// spinner output as appropriate.
	consumeStateChange(evt) { // eslint-disable-line complexity
		const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;

		switch (evt.type) {
			case 'declared-test':
				// Ignore
				break;
			case 'hook-failed':
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'internal-error':
				this.internalErrors.push(evt);
				if (evt.testFile) {
					this.writeWithCounts(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
				} else {
					this.writeWithCounts(colors.error(`${figures.cross} Internal error`));
				}

				break;
			case 'line-number-selection-error':
				this.lineNumberErrors.push(evt);
				this.writeWithCounts(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
				break;
			case 'missing-ava-import':
				this.filesWithMissingAvaImports.add(evt.testFile);
				this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
				break;
			case 'selected-test':
				// Ignore
				break;
			case 'stats':
				this.stats = evt.stats;
				break;
			case 'test-failed':
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'test-passed':
				if (evt.knownFailing) {
					this.knownFailures.push(evt);
				}

				this.writeTestSummary(evt);
				break;
			case 'timeout':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'interrupt':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'uncaught-exception':
				// Details are deferred to endRun().
				this.uncaughtExceptions.push(evt);
				break;
			case 'unhandled-rejection':
				// Details are deferred to endRun().
				this.unhandledRejections.push(evt);
				break;
			case 'worker-failed':
				if (fileStats.declaredTests === 0) {
					this.filesWithoutDeclaredTests.add(evt.testFile);
				}

				break;
			case 'worker-finished':
				if (fileStats.declaredTests === 0) {
					this.filesWithoutDeclaredTests.add(evt.testFile);
					this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
				} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
					this.filesWithoutMatchedLineNumbers.add(evt.testFile);
					this.writeWithCounts(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
				}

				break;
			case 'worker-stderr':
			case 'worker-stdout':
				// Forcibly clear the spinner, writing the chunk corrupts the TTY.
				this.spinner.clear();

				this.stdStream.write(evt.chunk);
				// If the chunk does not end with a linebreak, *forcibly* write one to
				// ensure it remains visible in the TTY.
				// Tests cannot assume their standard output is not interrupted. Indeed
				// we multiplex stdout and stderr into a single stream. However as
				// long as stdStream is different from reportStream users can read
				// their original output by redirecting the streams.
				if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
					// Use write() rather than writeLine() so the (presumably corked)
					// line writer will actually write the empty line before re-rendering
					// the last spinner text below.
					this.lineWriter.write(os.EOL);
				}

				this.lineWriter.write(this.lineWriter.lastSpinnerText);
				break;
			default:
				break;
		}
	}

	// Writes `string` followed by the current pass/fail/skip counts; before
	// stats exist it degrades to a plain line.
	writeWithCounts(string) {
		if (!this.stats) {
			return this.lineWriter.writeLine(string);
		}

		string = string || '';
		if (string !== '') {
			string += os.EOL;
		}

		// In watch mode, suffix the first count line with a timestamp.
		let firstLinePostfix = this.watching ?
			' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
			'';

		if (this.stats.passedTests > 0) {
			string += os.EOL + colors.pass(`${this.stats.passedTests} passed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.passedKnownFailingTests > 0) {
			string += os.EOL + colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`);
		}

		if (this.stats.failedHooks > 0) {
			string += os.EOL + colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.failedTests > 0) {
			string += os.EOL + colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix;
			firstLinePostfix = '';
		}

		if (this.stats.skippedTests > 0) {
			string += os.EOL + colors.skip(`${this.stats.skippedTests} skipped`);
		}

		if (this.stats.todoTests > 0) {
			string += os.EOL + colors.todo(`${this.stats.todoTests} todo`);
		}

		this.lineWriter.writeLine(string);
	}

	// Renders one error: source excerpt, assertion diff or summary, usage
	// hints, then the (beautified) stack.
	writeErr(evt) {
		// ts-node TSError: the diagnostic text is the most useful output.
		if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
			this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
			return;
		}

		if (evt.err.source) {
			this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
			const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.lineWriter.columns - 2});
			if (excerpt) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(excerpt);
			}
		}

		if (evt.err.avaAssertionError) {
			const result = formatSerializedError(evt.err);
			if (result.printMessage) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(evt.err.message);
			}

			if (result.formatted) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(result.formatted);
			}

			const message = improperUsageMessages.forError(evt.err);
			if (message) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(message);
			}
		} else if (evt.err.nonErrorObject) {
			// A thrown non-Error value; show its preformatted representation.
			this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
		} else {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(evt.err.summary);
		}

		const formatted = this.formatErrorStack(evt.err);
		if (formatted.length > 0) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(formatted.join('\n'));
		}
	}

	// Returns the error's stack as an array of display lines; Node-internal
	// frames are dimmed when the stack is eligible for beautification.
	formatErrorStack(error) {
		if (!error.stack) {
			return [];
		}

		if (error.shouldBeautifyStack) {
			return beautifyStack(error.stack).map(line => {
				if (nodeInternals.some(internal => internal.test(line))) {
					return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
				}

				return colors.errorStack(`${figures.pointerSmall} ${line}`);
			});
		}

		return [error.stack];
	}

	// Writes any `t.log()` output captured for the event, each entry prefixed
	// with an info figure.
	writeLogs(evt) {
		if (evt.logs) {
			for (const log of evt.logs) {
				const logLines = indentString(colors.log(log), 4);
				const logLinesWithLeadingFigure = logLines.replace(
					/^ {4}/,
					`  ${colors.information(figures.info)} `
				);
				this.lineWriter.writeLine(logLinesWithLeadingFigure);
			}
		}
	}

	// Writes a one-line summary for a finished test (or failed hook) together
	// with the refreshed counts.
	// NOTE(review): the failed branch and the default branch currently produce
	// identical, unstyled output; upstream styles failures differently —
	// verify against the upstream mini reporter before relying on this.
	writeTestSummary(evt) {
		if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
			this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
		} else if (evt.knownFailing) {
			this.writeWithCounts(`${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
		} else {
			this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
		}
	}

	// Writes the full detail block (title, logs, error) for one failure.
	writeFailure(evt) {
		this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
		this.writeLogs(evt);
		this.lineWriter.writeLine();
		this.writeErr(evt);
	}

	// Lists tests that never completed, grouped per file (timeout/SIGINT).
	writePendingTests(evt) {
		for (const [file, testsInFile] of evt.pendingTests) {
			if (testsInFile.size === 0) {
				continue;
			}

			this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
			for (const title of testsInFile) {
				this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
			}

			this.lineWriter.writeLine('');
		}
	}

	// Stops the spinner and prints the final report: per-file warnings, count
	// summary, then full details for failures, internal errors, uncaught
	// exceptions and unhandled rejections, and the fail-fast disclaimer.
	endRun() { // eslint-disable-line complexity
		this.spinner.stop();
		cliCursor.show(this.reportStream);

		if (!this.stats) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
			this.lineWriter.writeLine();
			return;
		}

		if (this.matching && this.stats.selectedTests === 0) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
			this.lineWriter.writeLine();
			return;
		}

		this.lineWriter.writeLine();

		// Timestamp suffix for the first summary line only (watch mode).
		let firstLinePostfix = this.watching ?
			' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
			'';

		if (this.filesWithMissingAvaImports.size > 0) {
			for (const testFile of this.filesWithMissingAvaImports) {
				this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
				firstLinePostfix = '';
			}
		}

		if (this.filesWithoutDeclaredTests.size > 0) {
			for (const testFile of this.filesWithoutDeclaredTests) {
				if (!this.filesWithMissingAvaImports.has(testFile)) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
					firstLinePostfix = '';
				}
			}
		}

		if (this.lineNumberErrors.length > 0) {
			for (const evt of this.lineNumberErrors) {
				this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
			}
		}

		if (this.filesWithoutMatchedLineNumbers.size > 0) {
			for (const testFile of this.filesWithoutMatchedLineNumbers) {
				if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
					firstLinePostfix = '';
				}
			}
		}

		if (this.filesWithMissingAvaImports.size > 0 || this.filesWithoutDeclaredTests.size > 0 || this.filesWithoutMatchedLineNumbers.size > 0) {
			this.lineWriter.writeLine();
		}

		if (this.stats.failedHooks > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.failedTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		// Only claim success when nothing failed.
		if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
			this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.passedKnownFailingTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
		}

		if (this.stats.skippedTests > 0) {
			this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
		}

		if (this.stats.todoTests > 0) {
			this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
		}

		if (this.stats.unhandledRejections > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
		}

		if (this.stats.uncaughtExceptions > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
		}

		if (this.previousFailures > 0) {
			this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
		}

		if (this.stats.passedKnownFailingTests > 0) {
			this.lineWriter.writeLine();
			for (const evt of this.knownFailures) {
				this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
			}
		}

		const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);

		if (this.failures.length > 0) {
			const writeTrailingLines = shouldWriteFailFastDisclaimer || this.internalErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
			this.lineWriter.writeLine();

			const last = this.failures[this.failures.length - 1];
			for (const evt of this.failures) {
				this.writeFailure(evt);
				// Separate entries (and the trailing section) with blank lines.
				if (evt !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}
			}
		}

		if (this.internalErrors.length > 0) {
			const writeLeadingLine = this.failures.length === 0;
			const writeTrailingLines = shouldWriteFailFastDisclaimer || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

			if (writeLeadingLine) {
				this.lineWriter.writeLine();
			}

			const last = this.internalErrors[this.internalErrors.length - 1];
			for (const evt of this.internalErrors) {
				if (evt.testFile) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
				} else {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
				}

				this.lineWriter.writeLine(colors.stack(evt.err.summary));
				this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
				if (evt !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}
			}
		}

		if (this.uncaughtExceptions.length > 0) {
			const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0;
			const writeTrailingLines = shouldWriteFailFastDisclaimer || this.unhandledRejections.length > 0;

			if (writeLeadingLine) {
				this.lineWriter.writeLine();
			}

			const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
			for (const evt of this.uncaughtExceptions) {
				this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				if (evt !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}
			}
		}

		if (this.unhandledRejections.length > 0) {
			const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0 && this.uncaughtExceptions.length === 0;
			const writeTrailingLines = shouldWriteFailFastDisclaimer;

			if (writeLeadingLine) {
				this.lineWriter.writeLine();
			}

			const last = this.unhandledRejections[this.unhandledRejections.length - 1];
			for (const evt of this.unhandledRejections) {
				this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				if (evt !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}
			}
		}

		if (shouldWriteFailFastDisclaimer) {
			// Explain what was skipped because --fail-fast aborted the run.
			let remaining = '';
			if (this.stats.remainingTests > 0) {
				remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
				if (this.stats.files > this.stats.finishedWorkers) {
					remaining += ', as well as ';
				}
			}

			if (this.stats.files > this.stats.finishedWorkers) {
				const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
				remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
				if (this.stats.remainingTests === 0) {
					remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
				}
			}

			this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
		}

		this.lineWriter.writeLine();
	}
}
module.exports = MiniReporter;
|
||||
19
node_modules/ava/lib/reporters/tap.js
generated
vendored
19
node_modules/ava/lib/reporters/tap.js
generated
vendored
|
|
@ -30,7 +30,7 @@ function dumpError(error) {
|
|||
}
|
||||
|
||||
if (error.values.length > 0) {
|
||||
object.values = error.values.reduce((acc, value) => {
|
||||
object.values = error.values.reduce((acc, value) => { // eslint-disable-line unicorn/no-reduce
|
||||
acc[value.label] = stripAnsi(value.formatted);
|
||||
return acc;
|
||||
}, {});
|
||||
|
|
@ -125,12 +125,22 @@ class TapReporter {
|
|||
this.reportStream.write(`# ${stripAnsi(title)}${os.EOL}`);
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(log, 4).replace(/^ {4}/, ' # ');
|
||||
const logLines = indentString(log, 4).replace(/^ {4}/gm, '# ');
|
||||
this.reportStream.write(`${logLines}${os.EOL}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTimeout(evt) {
|
||||
const err = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
|
||||
|
||||
for (const [testFile, tests] of evt.pendingTests) {
|
||||
for (const title of tests) {
|
||||
this.writeTest({testFile, title, err}, {passed: false, todo: false, skip: false});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
consumeStateChange(evt) { // eslint-disable-line complexity
|
||||
const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;
|
||||
|
||||
|
|
@ -158,6 +168,9 @@ class TapReporter {
|
|||
this.writeTest(evt, {passed: false, todo: true, skip: false});
|
||||
}
|
||||
|
||||
break;
|
||||
case 'snapshot-error':
|
||||
this.writeComment(evt, {title: 'Could not update snapshots'});
|
||||
break;
|
||||
case 'stats':
|
||||
this.stats = evt.stats;
|
||||
|
|
@ -169,7 +182,7 @@ class TapReporter {
|
|||
this.writeTest(evt, {passed: true, todo: false, skip: false});
|
||||
break;
|
||||
case 'timeout':
|
||||
this.writeCrash(evt, `Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
|
||||
this.writeTimeout(evt);
|
||||
break;
|
||||
case 'uncaught-exception':
|
||||
this.writeCrash(evt);
|
||||
|
|
|
|||
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
|
|
@ -1,463 +0,0 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const plur = require('plur');
|
||||
const prettyMs = require('pretty-ms');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
// Writable stream for the verbose reporter: forwards chunks to the destination
// and tracks whether the most recent writeLine() produced an empty line.
class LineWriter extends stream.Writable {
	constructor(dest) {
		super();

		this.dest = dest;
		this.columns = dest.columns || 80;
		this.lastLineIsEmpty = false;
	}

	_write(chunk, _encoding, callback) {
		this.dest.write(chunk);
		callback();
	}

	writeLine(string) {
		// An omitted/empty string produces a bare newline.
		this.lastLineIsEmpty = !string;
		this.write(string ? indentString(string, 2) + os.EOL : os.EOL);
	}

	ensureEmptyLine() {
		if (this.lastLineIsEmpty) {
			return;
		}

		this.writeLine();
	}
}
|
||||
|
||||
class VerboseReporter {
|
||||
constructor(options) {
|
||||
this.durationThreshold = options.durationThreshold || 100;
|
||||
this.reportStream = options.reportStream;
|
||||
this.stdStream = options.stdStream;
|
||||
this.watching = options.watching;
|
||||
|
||||
this.lineWriter = new LineWriter(this.reportStream);
|
||||
this.consumeStateChange = whileCorked(this.reportStream, this.consumeStateChange);
|
||||
this.endRun = whileCorked(this.reportStream, this.endRun);
|
||||
this.relativeFile = file => path.relative(options.projectDir, file);
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset() {
|
||||
if (this.removePreviousListener) {
|
||||
this.removePreviousListener();
|
||||
}
|
||||
|
||||
this.failFastEnabled = false;
|
||||
this.failures = [];
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.knownFailures = [];
|
||||
this.runningTestFiles = new Map();
|
||||
this.lastLineIsEmpty = false;
|
||||
this.matching = false;
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
this.previousFailures = 0;
|
||||
this.removePreviousListener = null;
|
||||
this.stats = null;
|
||||
}
|
||||
|
||||
	// Subscribes to the run plan's status events and prints the run header.
	startRun(plan) {
		if (plan.bailWithoutReporting) {
			return;
		}

		this.reset();

		this.failFastEnabled = plan.failFastEnabled;
		this.matching = plan.matching;
		this.previousFailures = plan.previousFailures;
		this.emptyParallelRun = plan.status.emptyParallelRun;

		// Only prefix titles with the file path when it is ambiguous which
		// file a test came from.
		if (this.watching || plan.files.length > 1) {
			this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
		}

		this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));

		// In watch mode, draw a horizontal rule between consecutive runs.
		if (this.watching && plan.runVector > 1) {
			this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.reportStream.columns || 80)) + os.EOL);
		}

		this.lineWriter.writeLine();
	}
|
||||
|
||||
	// Dispatches a single status event, writing verbose per-test output as the
	// run progresses.
	consumeStateChange(evt) { // eslint-disable-line complexity
		const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;

		switch (evt.type) {
			case 'hook-failed':
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'internal-error':
				if (evt.testFile) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
				} else {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
				}

				this.lineWriter.writeLine(colors.stack(evt.err.summary));
				this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
				this.lineWriter.writeLine();
				this.lineWriter.writeLine();
				break;
			case 'line-number-selection-error':
				this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
				break;
			case 'missing-ava-import':
				this.filesWithMissingAvaImports.add(evt.testFile);
				this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
				break;
			case 'hook-finished':
				// Only mention hooks that produced t.log() output.
				if (evt.logs.length > 0) {
					this.lineWriter.writeLine(` ${this.prefixTitle(evt.testFile, evt.title)}`);
					this.writeLogs(evt);
				}

				break;
			case 'selected-test':
				// Skipped and todo tests are reported at selection time; they
				// never produce a test-passed/test-failed event.
				if (evt.skip) {
					this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
				} else if (evt.todo) {
					this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
				}

				break;
			case 'stats':
				this.stats = evt.stats;
				break;
			case 'test-failed':
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'test-passed':
				if (evt.knownFailing) {
					this.knownFailures.push(evt);
				}

				this.writeTestSummary(evt);
				break;
			case 'timeout':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'interrupt':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'uncaught-exception':
				this.lineWriter.ensureEmptyLine();
				this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				this.lineWriter.writeLine();
				break;
			case 'unhandled-rejection':
				this.lineWriter.ensureEmptyLine();
				this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				this.lineWriter.writeLine();
				break;
			case 'worker-failed':
				// Suppressed when the file already got a missing-import error.
				if (!this.filesWithMissingAvaImports.has(evt.testFile)) {
					if (evt.nonZeroExitCode) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited with a non-zero exit code: ${evt.nonZeroExitCode}`));
					} else {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited due to ${evt.signal}`));
					}
				}

				break;
			case 'worker-finished':
				if (!evt.forcedExit && !this.filesWithMissingAvaImports.has(evt.testFile)) {
					if (fileStats.declaredTests === 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
					} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
					} else if (!this.failFastEnabled && fileStats.remainingTests > 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(evt.testFile)}`));
					}
				}

				break;
			case 'worker-stderr':
			case 'worker-stdout':
				this.stdStream.write(evt.chunk);
				// If the chunk does not end with a linebreak, *forcibly* write one to
				// ensure it remains visible in the TTY.
				// Tests cannot assume their standard output is not interrupted. Indeed
				// we multiplex stdout and stderr into a single stream. However as
				// long as stdStream is different from reportStream users can read
				// their original output by redirecting the streams.
				if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
					this.reportStream.write(os.EOL);
				}

				break;
			default:
				break;
		}
	}
|
||||
|
||||
writeErr(evt) {
|
||||
if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
|
||||
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (evt.err.source) {
|
||||
this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
|
||||
const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.reportStream.columns - 2});
|
||||
if (excerpt) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(excerpt);
|
||||
}
|
||||
}
|
||||
|
||||
if (evt.err.avaAssertionError) {
|
||||
const result = formatSerializedError(evt.err);
|
||||
if (result.printMessage) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.message);
|
||||
}
|
||||
|
||||
if (result.formatted) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(result.formatted);
|
||||
}
|
||||
|
||||
const message = improperUsageMessages.forError(evt.err);
|
||||
if (message) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(message);
|
||||
}
|
||||
} else if (evt.err.nonErrorObject) {
|
||||
this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
|
||||
} else {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.summary);
|
||||
}
|
||||
|
||||
const formatted = this.formatErrorStack(evt.err);
|
||||
if (formatted.length > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(formatted.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
formatErrorStack(error) {
|
||||
if (!error.stack) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (error.shouldBeautifyStack) {
|
||||
return beautifyStack(error.stack).map(line => {
|
||||
if (nodeInternals.some(internal => internal.test(line))) {
|
||||
return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
|
||||
}
|
||||
|
||||
return colors.errorStack(`${figures.pointerSmall} ${line}`);
|
||||
});
|
||||
}
|
||||
|
||||
return [error.stack];
|
||||
}
|
||||
|
||||
writePendingTests(evt) {
|
||||
for (const [file, testsInFile] of evt.pendingTests) {
|
||||
if (testsInFile.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
|
||||
for (const title of testsInFile) {
|
||||
this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine('');
|
||||
}
|
||||
}
|
||||
|
||||
writeLogs(evt) {
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(colors.log(log), 4);
|
||||
const logLinesWithLeadingFigure = logLines.replace(
|
||||
/^ {4}/,
|
||||
` ${colors.information(figures.info)} `
|
||||
);
|
||||
this.lineWriter.writeLine(logLinesWithLeadingFigure);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTestSummary(evt) {
|
||||
if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
|
||||
this.lineWriter.writeLine(`${colors.error(figures.cross)} ${this.prefixTitle(evt.testFile, evt.title)} ${colors.error(evt.err.message)}`);
|
||||
} else if (evt.knownFailing) {
|
||||
this.lineWriter.writeLine(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
} else {
|
||||
const duration = evt.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(evt.duration) + ')') : '';
|
||||
|
||||
this.lineWriter.writeLine(`${colors.pass(figures.tick)} ${this.prefixTitle(evt.testFile, evt.title)}${duration}`);
|
||||
}
|
||||
|
||||
this.writeLogs(evt);
|
||||
}
|
||||
|
||||
writeFailure(evt) {
|
||||
this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
this.writeLogs(evt);
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
}
|
||||
|
||||
endRun() { // eslint-disable-line complexity
|
||||
if (this.emptyParallelRun) {
|
||||
this.lineWriter.writeLine('No files tested in this parallel run');
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.stats) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.matching && this.stats.selectedTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
if (this.stats.parallelRuns) {
|
||||
const {currentFileCount, currentIndex, totalRuns} = this.stats.parallelRuns;
|
||||
this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
let firstLinePostfix = this.watching ?
|
||||
' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
|
||||
'';
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
|
||||
}
|
||||
|
||||
if (this.stats.unhandledRejections > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
|
||||
}
|
||||
|
||||
if (this.stats.uncaughtExceptions > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
|
||||
}
|
||||
|
||||
if (this.previousFailures > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
for (const evt of this.knownFailures) {
|
||||
this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
|
||||
}
|
||||
}
|
||||
|
||||
const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);
|
||||
|
||||
if (this.failures.length > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
const lastFailure = this.failures[this.failures.length - 1];
|
||||
for (const evt of this.failures) {
|
||||
this.writeFailure(evt);
|
||||
if (evt !== lastFailure || shouldWriteFailFastDisclaimer) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldWriteFailFastDisclaimer) {
|
||||
let remaining = '';
|
||||
if (this.stats.remainingTests > 0) {
|
||||
remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
remaining += ', as well as ';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
|
||||
remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
|
||||
if (this.stats.remainingTests === 0) {
|
||||
remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
|
||||
}
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = VerboseReporter;
|
||||
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
|
|
@ -1,13 +0,0 @@
|
|||
'use strict';
|
||||
function whileCorked(stream, fn) {
|
||||
return function (...args) {
|
||||
stream.cork();
|
||||
try {
|
||||
fn.apply(this, args);
|
||||
} finally {
|
||||
stream.uncork();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = whileCorked;
|
||||
5
node_modules/ava/lib/run-status.js
generated
vendored
5
node_modules/ava/lib/run-status.js
generated
vendored
|
|
@ -27,6 +27,7 @@ class RunStatus extends Emittery {
|
|||
passedKnownFailingTests: 0,
|
||||
passedTests: 0,
|
||||
selectedTests: 0,
|
||||
sharedWorkerErrors: 0,
|
||||
skippedTests: 0,
|
||||
timeouts: 0,
|
||||
todoTests: 0,
|
||||
|
|
@ -93,6 +94,9 @@ class RunStatus extends Emittery {
|
|||
this.addPendingTest(event);
|
||||
}
|
||||
|
||||
break;
|
||||
case 'shared-worker-error':
|
||||
stats.sharedWorkerErrors++;
|
||||
break;
|
||||
case 'test-failed':
|
||||
stats.failedTests++;
|
||||
|
|
@ -164,6 +168,7 @@ class RunStatus extends Emittery {
|
|||
this.stats.failedHooks > 0 ||
|
||||
this.stats.failedTests > 0 ||
|
||||
this.stats.failedWorkers > 0 ||
|
||||
this.stats.sharedWorkerErrors > 0 ||
|
||||
this.stats.timeouts > 0 ||
|
||||
this.stats.uncaughtExceptions > 0 ||
|
||||
this.stats.unhandledRejections > 0
|
||||
|
|
|
|||
79
node_modules/ava/lib/runner.js
generated
vendored
79
node_modules/ava/lib/runner.js
generated
vendored
|
|
@ -23,13 +23,17 @@ class Runner extends Emittery {
|
|||
this.recordNewSnapshots = options.recordNewSnapshots === true;
|
||||
this.runOnlyExclusive = options.runOnlyExclusive === true;
|
||||
this.serial = options.serial === true;
|
||||
this.skippingTests = false;
|
||||
this.snapshotDir = options.snapshotDir;
|
||||
this.updateSnapshots = options.updateSnapshots;
|
||||
|
||||
this.activeRunnables = new Set();
|
||||
this.boundCompareTestSnapshot = this.compareTestSnapshot.bind(this);
|
||||
this.skippedSnapshots = false;
|
||||
this.boundSkipSnapshot = this.skipSnapshot.bind(this);
|
||||
this.interrupted = false;
|
||||
this.snapshots = null;
|
||||
this.nextTaskIndex = 0;
|
||||
this.tasks = {
|
||||
after: [],
|
||||
afterAlways: [],
|
||||
|
|
@ -41,6 +45,7 @@ class Runner extends Emittery {
|
|||
serial: [],
|
||||
todo: []
|
||||
};
|
||||
this.waitForReady = [];
|
||||
|
||||
const uniqueTestTitles = new Set();
|
||||
this.registerUniqueTitle = title => {
|
||||
|
|
@ -74,6 +79,8 @@ class Runner extends Emittery {
|
|||
});
|
||||
}
|
||||
|
||||
metadata.taskIndex = this.nextTaskIndex++;
|
||||
|
||||
const {args, buildTitle, implementations, rawTitle} = parseTestArgs(testArgs);
|
||||
|
||||
if (this.checkSelectedByLineNumbers) {
|
||||
|
|
@ -147,6 +154,10 @@ class Runner extends Emittery {
|
|||
task.metadata.exclusive = matcher([title], this.match).length === 1;
|
||||
}
|
||||
|
||||
if (task.metadata.skipped) {
|
||||
this.skippingTests = true;
|
||||
}
|
||||
|
||||
if (task.metadata.exclusive) {
|
||||
this.runOnlyExclusive = true;
|
||||
}
|
||||
|
|
@ -182,7 +193,7 @@ class Runner extends Emittery {
|
|||
fixedLocation: this.snapshotDir,
|
||||
projectDir: this.projectDir,
|
||||
recordNewSnapshots: this.recordNewSnapshots,
|
||||
updating: this.updateSnapshots
|
||||
updating: this.updateSnapshots && !this.runOnlyExclusive && !this.skippingTests
|
||||
});
|
||||
this.emit('dependency', this.snapshots.snapPath);
|
||||
}
|
||||
|
|
@ -190,18 +201,35 @@ class Runner extends Emittery {
|
|||
return this.snapshots.compare(options);
|
||||
}
|
||||
|
||||
skipSnapshot() {
|
||||
this.skippedSnapshots = true;
|
||||
}
|
||||
|
||||
saveSnapshotState() {
|
||||
if (
|
||||
this.updateSnapshots &&
|
||||
(
|
||||
this.runOnlyExclusive ||
|
||||
this.skippingTests ||
|
||||
this.skippedSnapshots
|
||||
)
|
||||
) {
|
||||
return {cannotSave: true};
|
||||
}
|
||||
|
||||
if (this.snapshots) {
|
||||
return this.snapshots.save();
|
||||
return {touchedFiles: this.snapshots.save()};
|
||||
}
|
||||
|
||||
if (this.updateSnapshots) {
|
||||
// TODO: There may be unused snapshot files if no test caused the
|
||||
// snapshots to be loaded. Prune them. But not if tests (including hooks!)
|
||||
// were skipped. Perhaps emit a warning if this occurs?
|
||||
return {touchedFiles: snapshotManager.cleanSnapshots({
|
||||
file: this.file,
|
||||
fixedLocation: this.snapshotDir,
|
||||
projectDir: this.projectDir
|
||||
})};
|
||||
}
|
||||
|
||||
return null;
|
||||
return {};
|
||||
}
|
||||
|
||||
onRun(runnable) {
|
||||
|
|
@ -241,7 +269,7 @@ class Runner extends Emittery {
|
|||
};
|
||||
|
||||
let waitForSerial = Promise.resolve();
|
||||
await runnables.reduce((previous, runnable) => {
|
||||
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-reduce
|
||||
if (runnable.metadata.serial || this.serial) {
|
||||
waitForSerial = previous.then(() => {
|
||||
// Serial runnables run as long as there was no previous failure, unless
|
||||
|
|
@ -275,7 +303,7 @@ class Runner extends Emittery {
|
|||
return result;
|
||||
}
|
||||
|
||||
async runHooks(tasks, contextRef, titleSuffix, testPassed) {
|
||||
async runHooks(tasks, contextRef, {titleSuffix, testPassed, associatedTaskIndex} = {}) {
|
||||
const hooks = tasks.map(task => new Runnable({
|
||||
contextRef,
|
||||
experiments: this.experiments,
|
||||
|
|
@ -284,8 +312,9 @@ class Runner extends Emittery {
|
|||
task.implementation :
|
||||
t => task.implementation.apply(null, [t].concat(task.args)),
|
||||
compareTestSnapshot: this.boundCompareTestSnapshot,
|
||||
skipSnapshot: this.boundSkipSnapshot,
|
||||
updateSnapshots: this.updateSnapshots,
|
||||
metadata: task.metadata,
|
||||
metadata: {...task.metadata, associatedTaskIndex},
|
||||
powerAssert: this.powerAssert,
|
||||
title: `${task.title}${titleSuffix || ''}`,
|
||||
isHook: true,
|
||||
|
|
@ -316,7 +345,14 @@ class Runner extends Emittery {
|
|||
|
||||
async runTest(task, contextRef) {
|
||||
const hookSuffix = ` for ${task.title}`;
|
||||
let hooksOk = await this.runHooks(this.tasks.beforeEach, contextRef, hookSuffix);
|
||||
let hooksOk = await this.runHooks(
|
||||
this.tasks.beforeEach,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
}
|
||||
);
|
||||
|
||||
let testOk = false;
|
||||
if (hooksOk) {
|
||||
|
|
@ -329,6 +365,7 @@ class Runner extends Emittery {
|
|||
task.implementation :
|
||||
t => task.implementation.apply(null, [t].concat(task.args)),
|
||||
compareTestSnapshot: this.boundCompareTestSnapshot,
|
||||
skipSnapshot: this.boundSkipSnapshot,
|
||||
updateSnapshots: this.updateSnapshots,
|
||||
metadata: task.metadata,
|
||||
powerAssert: this.powerAssert,
|
||||
|
|
@ -348,7 +385,14 @@ class Runner extends Emittery {
|
|||
logs: result.logs
|
||||
});
|
||||
|
||||
hooksOk = await this.runHooks(this.tasks.afterEach, contextRef, hookSuffix, testOk);
|
||||
hooksOk = await this.runHooks(
|
||||
this.tasks.afterEach,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
testPassed: testOk,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
});
|
||||
} else {
|
||||
this.emit('stateChange', {
|
||||
type: 'test-failed',
|
||||
|
|
@ -362,7 +406,14 @@ class Runner extends Emittery {
|
|||
}
|
||||
}
|
||||
|
||||
const alwaysOk = await this.runHooks(this.tasks.afterEachAlways, contextRef, hookSuffix, testOk);
|
||||
const alwaysOk = await this.runHooks(
|
||||
this.tasks.afterEachAlways,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
testPassed: testOk,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
});
|
||||
return alwaysOk && hooksOk && testOk;
|
||||
}
|
||||
|
||||
|
|
@ -435,6 +486,8 @@ class Runner extends Emittery {
|
|||
});
|
||||
}
|
||||
|
||||
await Promise.all(this.waitForReady);
|
||||
|
||||
if (concurrentTests.length === 0 && serialTests.length === 0) {
|
||||
this.emit('finish');
|
||||
// Don't run any hooks if there are no tests to run.
|
||||
|
|
@ -451,7 +504,7 @@ class Runner extends Emittery {
|
|||
return false;
|
||||
}
|
||||
|
||||
return serialTests.reduce(async (previous, task) => {
|
||||
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-reduce
|
||||
const previousOk = await previous;
|
||||
// Don't start tests after an interrupt.
|
||||
if (this.interrupted) {
|
||||
|
|
|
|||
76
node_modules/ava/lib/snapshot-manager.js
generated
vendored
76
node_modules/ava/lib/snapshot-manager.js
generated
vendored
|
|
@ -104,13 +104,32 @@ function combineEntries(entries) {
|
|||
const buffers = [];
|
||||
let byteLength = 0;
|
||||
|
||||
const sortedKeys = [...entries.keys()].sort();
|
||||
const sortedKeys = [...entries.keys()].sort((keyA, keyB) => {
|
||||
const [a, b] = [entries.get(keyA), entries.get(keyB)];
|
||||
const taskDifference = a.taskIndex - b.taskIndex;
|
||||
|
||||
if (taskDifference !== 0) {
|
||||
return taskDifference;
|
||||
}
|
||||
|
||||
const [assocA, assocB] = [a.associatedTaskIndex, b.associatedTaskIndex];
|
||||
if (assocA !== undefined && assocB !== undefined) {
|
||||
const assocDifference = assocA - assocB;
|
||||
|
||||
if (assocDifference !== 0) {
|
||||
return assocDifference;
|
||||
}
|
||||
}
|
||||
|
||||
return a.snapIndex - b.snapIndex;
|
||||
});
|
||||
|
||||
for (const key of sortedKeys) {
|
||||
const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
|
||||
buffers.push(keyBuffer);
|
||||
byteLength += keyBuffer.byteLength;
|
||||
|
||||
const formattedEntries = entries.get(key);
|
||||
const formattedEntries = entries.get(key).buffers;
|
||||
const last = formattedEntries[formattedEntries.length - 1];
|
||||
for (const entry of formattedEntries) {
|
||||
buffers.push(entry);
|
||||
|
|
@ -176,10 +195,11 @@ function encodeSnapshots(buffersByHash) {
|
|||
byteOffset += 2;
|
||||
|
||||
const entries = [];
|
||||
for (const pair of buffersByHash) {
|
||||
const hash = pair[0];
|
||||
const snapshotBuffers = pair[1];
|
||||
|
||||
// Maps can't have duplicate keys, so all items in [...buffersByHash.keys()]
|
||||
// are unique, so sortedHashes should be deterministic.
|
||||
const sortedHashes = [...buffersByHash.keys()].sort();
|
||||
const sortedBuffersByHash = [...sortedHashes.map(hash => [hash, buffersByHash.get(hash)])];
|
||||
for (const [hash, snapshotBuffers] of sortedBuffersByHash) {
|
||||
buffers.push(Buffer.from(hash, 'hex'));
|
||||
byteOffset += MD5_HASH_LENGTH;
|
||||
|
||||
|
|
@ -332,6 +352,7 @@ class Manager {
|
|||
const descriptor = concordance.describe(options.expected, concordanceOptions);
|
||||
const snapshot = concordance.serialize(descriptor);
|
||||
const entry = formatEntry(options.label, descriptor);
|
||||
const {taskIndex, snapIndex, associatedTaskIndex} = options;
|
||||
|
||||
return () => { // Must be called in order!
|
||||
this.hasChanges = true;
|
||||
|
|
@ -353,9 +374,9 @@ class Manager {
|
|||
snapshots.push(snapshot);
|
||||
|
||||
if (this.reportEntries.has(options.belongsTo)) {
|
||||
this.reportEntries.get(options.belongsTo).push(entry);
|
||||
this.reportEntries.get(options.belongsTo).buffers.push(entry);
|
||||
} else {
|
||||
this.reportEntries.set(options.belongsTo, [entry]);
|
||||
this.reportEntries.set(options.belongsTo, {buffers: [entry], taskIndex, snapIndex, associatedTaskIndex});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
@ -428,12 +449,49 @@ const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
|
|||
|
||||
exports.determineSnapshotDir = determineSnapshotDir;
|
||||
|
||||
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
|
||||
function determineSnapshotPaths({file, fixedLocation, projectDir}) {
|
||||
const dir = determineSnapshotDir({file, fixedLocation, projectDir});
|
||||
const relFile = path.relative(projectDir, resolveSourceFile(file));
|
||||
const name = path.basename(relFile);
|
||||
const reportFile = `${name}.md`;
|
||||
const snapFile = `${name}.snap`;
|
||||
|
||||
return {
|
||||
dir,
|
||||
relFile,
|
||||
snapFile,
|
||||
reportFile
|
||||
};
|
||||
}
|
||||
|
||||
function cleanFile(file) {
|
||||
try {
|
||||
fs.unlinkSync(file);
|
||||
return [file];
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return [];
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Remove snapshot and report if they exist. Returns an array containing the
|
||||
// paths of the touched files.
|
||||
function cleanSnapshots({file, fixedLocation, projectDir}) {
|
||||
const {dir, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
|
||||
|
||||
return [
|
||||
...cleanFile(path.join(dir, snapFile)),
|
||||
...cleanFile(path.join(dir, reportFile))
|
||||
];
|
||||
}
|
||||
|
||||
exports.cleanSnapshots = cleanSnapshots;
|
||||
|
||||
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
|
||||
const {dir, relFile, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
|
||||
const snapPath = path.join(dir, snapFile);
|
||||
|
||||
let appendOnly = !updating;
|
||||
|
|
|
|||
65
node_modules/ava/lib/test.js
generated
vendored
65
node_modules/ava/lib/test.js
generated
vendored
|
|
@ -39,7 +39,9 @@ class ExecutionContext extends assert.Assertions {
|
|||
compareWithSnapshot: options => {
|
||||
return test.compareWithSnapshot(options);
|
||||
},
|
||||
powerAssert: test.powerAssert
|
||||
powerAssert: test.powerAssert,
|
||||
experiments: test.experiments,
|
||||
disableSnapshots: test.isHook === true
|
||||
});
|
||||
testMap.set(this, test);
|
||||
|
||||
|
|
@ -64,8 +66,8 @@ class ExecutionContext extends assert.Assertions {
|
|||
|
||||
this.plan.skip = () => {};
|
||||
|
||||
this.timeout = ms => {
|
||||
test.timeout(ms);
|
||||
this.timeout = (ms, message) => {
|
||||
test.timeout(ms, message);
|
||||
};
|
||||
|
||||
this.teardown = callback => {
|
||||
|
|
@ -73,6 +75,12 @@ class ExecutionContext extends assert.Assertions {
|
|||
};
|
||||
|
||||
this.try = async (...attemptArgs) => {
|
||||
if (test.isHook) {
|
||||
const error = new Error('`t.try()` can only be used in tests');
|
||||
test.saveFirstError(error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const {args, buildTitle, implementations, receivedImplementationArray} = parseTestArgs(attemptArgs);
|
||||
|
||||
if (implementations.length === 0) {
|
||||
|
|
@ -179,7 +187,8 @@ class ExecutionContext extends assert.Assertions {
|
|||
}
|
||||
|
||||
get passed() {
|
||||
return testMap.get(this).testPassed;
|
||||
const test = testMap.get(this);
|
||||
return test.isHook ? test.testPassed : !test.assertError;
|
||||
}
|
||||
|
||||
_throwsArgStart(assertion, file, line) {
|
||||
|
|
@ -221,7 +230,17 @@ class Test {
|
|||
const index = id ? 0 : this.nextSnapshotIndex++;
|
||||
const label = id ? '' : message || `Snapshot ${index + 1}`; // Human-readable labels start counting at 1.
|
||||
|
||||
const {record, ...result} = options.compareTestSnapshot({belongsTo, deferRecording, expected, index, label});
|
||||
const {taskIndex, associatedTaskIndex} = this.metadata;
|
||||
const {record, ...result} = options.compareTestSnapshot({
|
||||
belongsTo,
|
||||
deferRecording,
|
||||
expected,
|
||||
index,
|
||||
label,
|
||||
taskIndex,
|
||||
snapIndex: this.snapshotCount,
|
||||
associatedTaskIndex
|
||||
});
|
||||
if (record) {
|
||||
this.deferredSnapshotRecordings.push(record);
|
||||
}
|
||||
|
|
@ -230,6 +249,10 @@ class Test {
|
|||
};
|
||||
|
||||
this.skipSnapshot = () => {
|
||||
if (typeof options.skipSnapshot === 'function') {
|
||||
options.skipSnapshot();
|
||||
}
|
||||
|
||||
if (options.updateSnapshots) {
|
||||
this.addFailedAssertion(new Error('Snapshot assertions cannot be skipped when updating snapshots'));
|
||||
} else {
|
||||
|
|
@ -289,11 +312,8 @@ class Test {
|
|||
};
|
||||
}
|
||||
|
||||
if (this.metadata.inline) {
|
||||
throw new Error('`t.end()` is not supported inside `t.try()`');
|
||||
} else {
|
||||
throw new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
|
||||
}
|
||||
const error_ = this.metadata.inline ? new Error('`t.end()` is not supported inside `t.try()`') : new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
|
||||
throw error_;
|
||||
}
|
||||
|
||||
endCallback(error, savedError) {
|
||||
|
|
@ -430,7 +450,14 @@ class Test {
|
|||
this.planError = planError;
|
||||
}
|
||||
|
||||
timeout(ms) {
|
||||
timeout(ms, message) {
|
||||
const result = assert.checkAssertionMessage('timeout', message);
|
||||
if (result !== true) {
|
||||
this.saveFirstError(result);
|
||||
// Allow the timeout to be set even when the message is invalid.
|
||||
message = '';
|
||||
}
|
||||
|
||||
if (this.finishing) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -438,7 +465,7 @@ class Test {
|
|||
this.clearTimeout();
|
||||
this.timeoutMs = ms;
|
||||
this.timeoutTimer = nowAndTimers.setTimeout(() => {
|
||||
this.saveFirstError(new Error('Test timeout exceeded'));
|
||||
this.saveFirstError(new Error(message || 'Test timeout exceeded'));
|
||||
|
||||
if (this.finishDueToTimeout) {
|
||||
this.finishDueToTimeout();
|
||||
|
|
@ -482,7 +509,13 @@ class Test {
|
|||
}
|
||||
|
||||
async runTeardowns() {
|
||||
for (const teardown of this.teardowns) {
|
||||
const teardowns = [...this.teardowns];
|
||||
|
||||
if (this.experiments.reverseTeardowns) {
|
||||
teardowns.reverse();
|
||||
}
|
||||
|
||||
for (const teardown of teardowns) {
|
||||
try {
|
||||
await teardown(); // eslint-disable-line no-await-in-loop
|
||||
} catch (error) {
|
||||
|
|
@ -714,11 +747,7 @@ class Test {
|
|||
if (this.metadata.failing) {
|
||||
passed = !passed;
|
||||
|
||||
if (passed) {
|
||||
error = null;
|
||||
} else {
|
||||
error = new Error('Test was expected to fail, but succeeded, you should stop marking the test as failing');
|
||||
}
|
||||
error = passed ? null : new Error('Test was expected to fail, but succeeded, you should stop marking the test as failing');
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
|
|||
203
node_modules/ava/lib/worker/ipc.js
generated
vendored
203
node_modules/ava/lib/worker/ipc.js
generated
vendored
|
|
@ -1,50 +1,42 @@
|
|||
'use strict';
|
||||
const Emittery = require('emittery');
|
||||
const events = require('events');
|
||||
const pEvent = require('p-event');
|
||||
const {controlFlow} = require('../ipc-flow-control');
|
||||
const {get: getOptions} = require('./options');
|
||||
|
||||
const emitter = new Emittery();
|
||||
process.on('message', message => {
|
||||
if (!message.ava) {
|
||||
return;
|
||||
}
|
||||
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
|
||||
|
||||
switch (message.ava.type) {
|
||||
case 'options':
|
||||
emitter.emit('options', message.ava.options);
|
||||
break;
|
||||
case 'peer-failed':
|
||||
emitter.emit('peerFailed');
|
||||
break;
|
||||
case 'pong':
|
||||
emitter.emit('pong');
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
exports.options = emitter.once('options');
|
||||
exports.peerFailed = emitter.once('peerFailed');
|
||||
exports.options = pEvent(process, 'message', selectAvaMessage('options')).then(message => message.ava.options);
|
||||
exports.peerFailed = pEvent(process, 'message', selectAvaMessage('peer-failed'));
|
||||
|
||||
const bufferedSend = controlFlow(process);
|
||||
function send(evt) {
|
||||
if (process.connected) {
|
||||
process.send({ava: evt});
|
||||
}
|
||||
bufferedSend({ava: evt});
|
||||
}
|
||||
|
||||
exports.send = send;
|
||||
|
||||
let refs = 1;
|
||||
function ref() {
|
||||
if (++refs === 1) {
|
||||
process.channel.ref();
|
||||
}
|
||||
}
|
||||
|
||||
function unref() {
|
||||
process.channel.unref();
|
||||
if (refs > 0 && --refs === 0) {
|
||||
process.channel.unref();
|
||||
}
|
||||
}
|
||||
|
||||
exports.unref = unref;
|
||||
|
||||
let pendingPings = Promise.resolve();
|
||||
async function flush() {
|
||||
process.channel.ref();
|
||||
ref();
|
||||
const promise = pendingPings.then(async () => { // eslint-disable-line promise/prefer-await-to-then
|
||||
send({type: 'ping'});
|
||||
await emitter.once('pong');
|
||||
await pEvent(process, 'message', selectAvaMessage('pong'));
|
||||
if (promise === pendingPings) {
|
||||
unref();
|
||||
}
|
||||
|
|
@ -54,3 +46,156 @@ async function flush() {
|
|||
}
|
||||
|
||||
exports.flush = flush;
|
||||
|
||||
let channelCounter = 0;
|
||||
let messageCounter = 0;
|
||||
|
||||
const channelEmitters = new Map();
|
||||
function createChannelEmitter(channelId) {
|
||||
if (channelEmitters.size === 0) {
|
||||
process.on('message', message => {
|
||||
if (!message.ava) {
|
||||
return;
|
||||
}
|
||||
|
||||
const {channelId, type, ...payload} = message.ava;
|
||||
if (
|
||||
type === 'shared-worker-error' ||
|
||||
type === 'shared-worker-message' ||
|
||||
type === 'shared-worker-ready'
|
||||
) {
|
||||
const emitter = channelEmitters.get(channelId);
|
||||
if (emitter !== undefined) {
|
||||
emitter.emit(type, payload);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const emitter = new events.EventEmitter();
|
||||
channelEmitters.set(channelId, emitter);
|
||||
return [emitter, () => channelEmitters.delete(channelId)];
|
||||
}
|
||||
|
||||
function registerSharedWorker(filename, initialData) {
|
||||
const channelId = `${getOptions().forkId}/channel/${++channelCounter}`;
|
||||
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
|
||||
|
||||
let forcedUnref = false;
|
||||
let refs = 0;
|
||||
const forceUnref = () => {
|
||||
if (forcedUnref) {
|
||||
return;
|
||||
}
|
||||
|
||||
forcedUnref = true;
|
||||
if (refs > 0) {
|
||||
unref();
|
||||
}
|
||||
};
|
||||
|
||||
const refChannel = () => {
|
||||
if (!forcedUnref && ++refs === 1) {
|
||||
ref();
|
||||
}
|
||||
};
|
||||
|
||||
const unrefChannel = () => {
|
||||
if (!forcedUnref && refs > 0 && --refs === 0) {
|
||||
unref();
|
||||
}
|
||||
};
|
||||
|
||||
send({
|
||||
type: 'shared-worker-connect',
|
||||
channelId,
|
||||
filename,
|
||||
initialData
|
||||
});
|
||||
|
||||
let currentlyAvailable = false;
|
||||
let error = null;
|
||||
|
||||
refChannel();
|
||||
const ready = pEvent(channelEmitter, 'shared-worker-ready').then(() => { // eslint-disable-line promise/prefer-await-to-then
|
||||
currentlyAvailable = error === null;
|
||||
}).finally(unrefChannel);
|
||||
|
||||
const messageEmitters = new Set();
|
||||
const handleMessage = message => {
|
||||
// Wait for a turn of the event loop, to allow new subscriptions to be set
|
||||
// up in response to the previous message.
|
||||
setImmediate(() => {
|
||||
for (const emitter of messageEmitters) {
|
||||
emitter.emit('message', message);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
channelEmitter.on('shared-worker-message', handleMessage);
|
||||
|
||||
pEvent(channelEmitter, 'shared-worker-error').then(() => { // eslint-disable-line promise/prefer-await-to-then
|
||||
unsubscribe();
|
||||
forceUnref();
|
||||
|
||||
error = new Error('The shared worker is no longer available');
|
||||
currentlyAvailable = false;
|
||||
for (const emitter of messageEmitters) {
|
||||
emitter.emit('error', error);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
forceUnref,
|
||||
ready,
|
||||
channel: {
|
||||
available: ready,
|
||||
|
||||
get currentlyAvailable() {
|
||||
return currentlyAvailable;
|
||||
},
|
||||
|
||||
async * receive() {
|
||||
if (error !== null) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const emitter = new events.EventEmitter();
|
||||
messageEmitters.add(emitter);
|
||||
try {
|
||||
refChannel();
|
||||
for await (const [message] of events.on(emitter, 'message')) {
|
||||
yield message;
|
||||
}
|
||||
} finally {
|
||||
unrefChannel();
|
||||
messageEmitters.delete(emitter);
|
||||
}
|
||||
},
|
||||
|
||||
post(serializedData, replyTo) {
|
||||
if (error !== null) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (!currentlyAvailable) {
|
||||
throw new Error('Shared worker is not yet available');
|
||||
}
|
||||
|
||||
const messageId = `${channelId}/message/${++messageCounter}`;
|
||||
send({
|
||||
type: 'shared-worker-message',
|
||||
channelId,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
|
||||
return messageId;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
exports.registerSharedWorker = registerSharedWorker;
|
||||
|
||||
|
|
|
|||
121
node_modules/ava/lib/worker/plugin.js
generated
vendored
Normal file
121
node_modules/ava/lib/worker/plugin.js
generated
vendored
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
const v8 = require('v8');
|
||||
const pkg = require('../../package.json');
|
||||
const subprocess = require('./subprocess');
|
||||
const options = require('./options');
|
||||
|
||||
const workers = new Map();
|
||||
const workerTeardownFns = new WeakMap();
|
||||
|
||||
function createSharedWorker(filename, initialData, teardown) {
|
||||
const channel = subprocess.registerSharedWorker(filename, initialData, teardown);
|
||||
|
||||
class ReceivedMessage {
|
||||
constructor(id, serializedData) {
|
||||
this.id = id;
|
||||
this.data = v8.deserialize(new Uint8Array(serializedData));
|
||||
}
|
||||
|
||||
reply(data) {
|
||||
return publishMessage(data, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that, no matter how often it's received, we have a stable message
|
||||
// object.
|
||||
const messageCache = new WeakMap();
|
||||
async function * receiveMessages(replyTo) {
|
||||
for await (const evt of channel.receive()) {
|
||||
if (replyTo === undefined && evt.replyTo !== undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo !== undefined && evt.replyTo !== replyTo) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let message = messageCache.get(evt);
|
||||
if (message === undefined) {
|
||||
message = new ReceivedMessage(evt.messageId, evt.serializedData);
|
||||
messageCache.set(evt, message);
|
||||
}
|
||||
|
||||
yield message;
|
||||
}
|
||||
}
|
||||
|
||||
function publishMessage(data, replyTo) {
|
||||
const id = channel.post([...v8.serialize(data)], replyTo);
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
available: channel.available,
|
||||
protocol: 'experimental',
|
||||
|
||||
get currentlyAvailable() {
|
||||
return channel.currentlyAvailable;
|
||||
},
|
||||
|
||||
publish(data) {
|
||||
return publishMessage(data);
|
||||
},
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const supportsSharedWorkers = process.versions.node >= '12.17.0';
|
||||
|
||||
function registerSharedWorker({
|
||||
filename,
|
||||
initialData,
|
||||
supportedProtocols,
|
||||
teardown
|
||||
}) {
|
||||
if (!options.get().experiments.sharedWorkers) {
|
||||
throw new Error('Shared workers are experimental. Opt in to them in your AVA configuration');
|
||||
}
|
||||
|
||||
if (!supportsSharedWorkers) {
|
||||
throw new Error('Shared workers require Node.js 12.17 or newer');
|
||||
}
|
||||
|
||||
if (!supportedProtocols.includes('experimental')) {
|
||||
throw new Error(`This version of AVA (${pkg.version}) does not support any of the desired shared worker protocols: ${supportedProtocols.join()}`);
|
||||
}
|
||||
|
||||
let worker = workers.get(filename);
|
||||
if (worker === undefined) {
|
||||
worker = createSharedWorker(filename, initialData, async () => {
|
||||
// Run possibly asynchronous teardown functions serially, in reverse
|
||||
// order. Any error will crash the worker.
|
||||
const teardownFns = workerTeardownFns.get(worker);
|
||||
if (teardownFns !== undefined) {
|
||||
for await (const fn of [...teardownFns].reverse()) {
|
||||
await fn();
|
||||
}
|
||||
}
|
||||
});
|
||||
workers.set(filename, worker);
|
||||
}
|
||||
|
||||
if (teardown !== undefined) {
|
||||
if (workerTeardownFns.has(worker)) {
|
||||
workerTeardownFns.get(worker).push(teardown);
|
||||
} else {
|
||||
workerTeardownFns.set(worker, [teardown]);
|
||||
}
|
||||
}
|
||||
|
||||
return worker;
|
||||
}
|
||||
|
||||
exports.registerSharedWorker = registerSharedWorker;
|
||||
43
node_modules/ava/lib/worker/subprocess.js
generated
vendored
43
node_modules/ava/lib/worker/subprocess.js
generated
vendored
|
|
@ -32,6 +32,8 @@ ipc.options.then(async options => {
|
|||
const dependencyTracking = require('./dependency-tracker');
|
||||
const lineNumberSelection = require('./line-numbers');
|
||||
|
||||
const sharedWorkerTeardowns = [];
|
||||
|
||||
async function exit(code) {
|
||||
if (!process.exitCode) {
|
||||
process.exitCode = code;
|
||||
|
|
@ -89,10 +91,12 @@ ipc.options.then(async options => {
|
|||
exit(1);
|
||||
});
|
||||
|
||||
runner.on('finish', () => {
|
||||
runner.on('finish', async () => {
|
||||
try {
|
||||
const touchedFiles = runner.saveSnapshotState();
|
||||
if (touchedFiles) {
|
||||
const {cannotSave, touchedFiles} = runner.saveSnapshotState();
|
||||
if (cannotSave) {
|
||||
ipc.send({type: 'snapshot-error'});
|
||||
} else if (touchedFiles) {
|
||||
ipc.send({type: 'touched-files', files: touchedFiles});
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
@ -101,6 +105,14 @@ ipc.options.then(async options => {
|
|||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await Promise.all(sharedWorkerTeardowns.map(fn => fn()));
|
||||
} catch (error) {
|
||||
ipc.send({type: 'uncaught-exception', err: serializeError('Shared worker teardown error', false, error, runner.file)});
|
||||
exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
nowAndTimers.setImmediate(() => {
|
||||
currentlyUnhandled()
|
||||
.filter(rejection => !attributedRejections.has(rejection.promise))
|
||||
|
|
@ -127,6 +139,19 @@ ipc.options.then(async options => {
|
|||
return runner;
|
||||
};
|
||||
|
||||
exports.registerSharedWorker = (filename, initialData, teardown) => {
|
||||
const {channel, forceUnref, ready} = ipc.registerSharedWorker(filename, initialData);
|
||||
runner.waitForReady.push(ready);
|
||||
sharedWorkerTeardowns.push(async () => {
|
||||
try {
|
||||
await teardown();
|
||||
} finally {
|
||||
forceUnref();
|
||||
}
|
||||
});
|
||||
return channel;
|
||||
};
|
||||
|
||||
// Store value to prevent required modules from modifying it.
|
||||
const testPath = options.file;
|
||||
|
||||
|
|
@ -196,15 +221,21 @@ ipc.options.then(async options => {
|
|||
if (Reflect.has(mod, Symbol.for('esm:package'))) {
|
||||
requireFn = mod(module);
|
||||
}
|
||||
} catch (_) {}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Install dependency tracker after the require configuration has been evaluated
|
||||
// to make sure we also track dependencies with custom require hooks
|
||||
dependencyTracking.install(testPath);
|
||||
|
||||
if (options.debug) {
|
||||
require('inspector').open(options.debug.port, options.debug.host, true); // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
if (options.debug && options.debug.port !== undefined && options.debug.host !== undefined) {
|
||||
// If an inspector was active when the main process started, and is
|
||||
// already active for the worker process, do not open a new one.
|
||||
const inspector = require('inspector'); // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
if (!options.debug.active || inspector.url() === undefined) {
|
||||
inspector.open(options.debug.port, options.debug.host, true);
|
||||
}
|
||||
|
||||
if (options.debug.break) {
|
||||
debugger; // eslint-disable-line no-debugger
|
||||
}
|
||||
|
|
|
|||
1
node_modules/ava/node_modules/.bin/acorn
generated
vendored
Symbolic link
1
node_modules/ava/node_modules/.bin/acorn
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../acorn/bin/acorn
|
||||
121
node_modules/ava/node_modules/acorn-walk/CHANGELOG.md
generated
vendored
121
node_modules/ava/node_modules/acorn-walk/CHANGELOG.md
generated
vendored
|
|
@ -1,121 +0,0 @@
|
|||
## 7.1.1 (2020-02-13)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Clean up the type definitions to actually work well with the main parser.
|
||||
|
||||
## 7.1.0 (2020-02-11)
|
||||
|
||||
### New features
|
||||
|
||||
Add a TypeScript definition file for the library.
|
||||
|
||||
## 7.0.0 (2017-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
Support walking `ImportExpression` nodes.
|
||||
|
||||
## 6.2.0 (2017-07-04)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for `Import` nodes.
|
||||
|
||||
## 6.1.0 (2018-09-28)
|
||||
|
||||
### New features
|
||||
|
||||
The walker now walks `TemplateElement` nodes.
|
||||
|
||||
## 6.0.1 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bad "main" field in package.json.
|
||||
|
||||
## 6.0.0 (2018-09-14)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
This is now a separate package, `acorn-walk`, rather than part of the main `acorn` package.
|
||||
|
||||
The `ScopeBody` and `ScopeExpression` meta-node-types are no longer supported.
|
||||
|
||||
## 5.7.1 (2018-06-15)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make sure the walker and bin files are rebuilt on release (the previous release didn't get the up-to-date versions).
|
||||
|
||||
## 5.7.0 (2018-06-15)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix crash in walker when walking a binding-less catch node.
|
||||
|
||||
## 5.6.2 (2018-06-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
In the walker, go back to allowing the `baseVisitor` argument to be null to default to the default base everywhere.
|
||||
|
||||
## 5.6.1 (2018-06-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix regression when passing `null` as fourth argument to `walk.recursive`.
|
||||
|
||||
## 5.6.0 (2018-05-31)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug in the walker that caused a crash when walking an object pattern spread.
|
||||
|
||||
## 5.5.1 (2018-03-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix regression in walker causing property values in object patterns to be walked as expressions.
|
||||
|
||||
## 5.5.0 (2018-02-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Support object spread in the AST walker.
|
||||
|
||||
## 5.4.1 (2018-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
5.4.0 somehow accidentally included an old version of walk.js.
|
||||
|
||||
## 5.2.0 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The `full` and `fullAncestor` walkers no longer visit nodes multiple times.
|
||||
|
||||
## 5.1.0 (2017-07-05)
|
||||
|
||||
### New features
|
||||
|
||||
New walker functions `full` and `fullAncestor`.
|
||||
|
||||
## 3.2.0 (2016-06-07)
|
||||
|
||||
### New features
|
||||
|
||||
Make it possible to use `visit.ancestor` with a walk state.
|
||||
|
||||
## 3.1.0 (2016-04-18)
|
||||
|
||||
### New features
|
||||
|
||||
The walker now allows defining handlers for `CatchClause` nodes.
|
||||
|
||||
## 2.5.2 (2015-10-27)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix bug where the walker walked an exported `let` statement as an expression.
|
||||
126
node_modules/ava/node_modules/acorn-walk/README.md
generated
vendored
126
node_modules/ava/node_modules/acorn-walk/README.md
generated
vendored
|
|
@ -1,126 +0,0 @@
|
|||
# Acorn AST walker
|
||||
|
||||
An abstract syntax tree walker for the
|
||||
[ESTree](https://github.com/estree/estree) format.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn-walk/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn). For questions
|
||||
and discussion, please use the
|
||||
[Tern discussion forum](https://discuss.ternjs.net).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn-walk
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
An algorithm for recursing through a syntax tree is stored as an
|
||||
object, with a property for each tree node type holding a function
|
||||
that will recurse through such a node. There are several ways to run
|
||||
such a walker.
|
||||
|
||||
**simple**`(node, visitors, base, state)` does a 'simple' walk over a
|
||||
tree. `node` should be the AST node to walk, and `visitors` an object
|
||||
with properties whose names correspond to node types in the [ESTree
|
||||
spec](https://github.com/estree/estree). The properties should contain
|
||||
functions that will be called with the node object and, if applicable
|
||||
the state at that point. The last two arguments are optional. `base`
|
||||
is a walker algorithm, and `state` is a start state. The default
|
||||
walker will simply visit all statements and expressions and not
|
||||
produce a meaningful state. (An example of a use of state is to track
|
||||
scope at each point in the tree.)
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.simple(acorn.parse("let x = 10"), {
|
||||
Literal(node) {
|
||||
console.log(`Found a literal: ${node.value}`)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
**ancestor**`(node, visitors, base, state)` does a 'simple' walk over
|
||||
a tree, building up an array of ancestor nodes (including the current node)
|
||||
and passing the array to the callbacks as a third parameter.
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.ancestor(acorn.parse("foo('hi')"), {
|
||||
Literal(_, ancestors) {
|
||||
console.log("This literal's ancestors are:", ancestors.map(n => n.type))
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
**recursive**`(node, state, functions, base)` does a 'recursive'
|
||||
walk, where the walker functions are responsible for continuing the
|
||||
walk on the child nodes of their target node. `state` is the start
|
||||
state, and `functions` should contain an object that maps node types
|
||||
to walker functions. Such functions are called with `(node, state, c)`
|
||||
arguments, and can cause the walk to continue on a sub-node by calling
|
||||
the `c` argument on it with `(node, state)` arguments. The optional
|
||||
`base` argument provides the fallback walker functions for node types
|
||||
that aren't handled in the `functions` object. If not given, the
|
||||
default walkers will be used.
|
||||
|
||||
**make**`(functions, base)` builds a new walker object by using the
|
||||
walker functions in `functions` and filling in the missing ones by
|
||||
taking defaults from `base`.
|
||||
|
||||
**full**`(node, callback, base, state)` does a 'full' walk over a
|
||||
tree, calling the callback with the arguments (node, state, type) for
|
||||
each node
|
||||
|
||||
**fullAncestor**`(node, callback, base, state)` does a 'full' walk
|
||||
over a tree, building up an array of ancestor nodes (including the
|
||||
current node) and passing the array to the callbacks as a third
|
||||
parameter.
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.full(acorn.parse("1 + 1"), node => {
|
||||
console.log(`There's a ${node.type} node at ${node.ch}`)
|
||||
})
|
||||
```
|
||||
|
||||
**findNodeAt**`(node, start, end, test, base, state)` tries to locate
|
||||
a node in a tree at the given start and/or end offsets, which
|
||||
satisfies the predicate `test`. `start` and `end` can be either `null`
|
||||
(as wildcard) or a number. `test` may be a string (indicating a node
|
||||
type) or a function that takes `(nodeType, node)` arguments and
|
||||
returns a boolean indicating whether this node is interesting. `base`
|
||||
and `state` are optional, and can be used to specify a custom walker.
|
||||
Nodes are tested from inner to outer, so if two nodes match the
|
||||
boundaries, the inner one will be preferred.
|
||||
|
||||
**findNodeAround**`(node, pos, test, base, state)` is a lot like
|
||||
`findNodeAt`, but will match any node that exists 'around' (spanning)
|
||||
the given position.
|
||||
|
||||
**findNodeAfter**`(node, pos, test, base, state)` is similar to
|
||||
`findNodeAround`, but will match all nodes *after* the given position
|
||||
(testing outer nodes before inner nodes).
|
||||
115
node_modules/ava/node_modules/acorn-walk/dist/walk.d.ts
generated
vendored
115
node_modules/ava/node_modules/acorn-walk/dist/walk.d.ts
generated
vendored
|
|
@ -1,115 +0,0 @@
|
|||
import {Node} from 'acorn';
|
||||
|
||||
declare module "acorn-walk" {
|
||||
type FullWalkerCallback<TState> = (
|
||||
node: Node,
|
||||
state: TState,
|
||||
type: string
|
||||
) => void;
|
||||
|
||||
type FullAncestorWalkerCallback<TState> = (
|
||||
node: Node,
|
||||
state: TState | Node[],
|
||||
ancestors: Node[],
|
||||
type: string
|
||||
) => void;
|
||||
type WalkerCallback<TState> = (node: Node, state: TState) => void;
|
||||
|
||||
type SimpleWalkerFn<TState> = (
|
||||
node: Node,
|
||||
state: TState
|
||||
) => void;
|
||||
|
||||
type AncestorWalkerFn<TState> = (
|
||||
node: Node,
|
||||
state: TState| Node[],
|
||||
ancestors: Node[]
|
||||
) => void;
|
||||
|
||||
type RecursiveWalkerFn<TState> = (
|
||||
node: Node,
|
||||
state: TState,
|
||||
callback: WalkerCallback<TState>
|
||||
) => void;
|
||||
|
||||
type SimpleVisitors<TState> = {
|
||||
[type: string]: SimpleWalkerFn<TState>
|
||||
};
|
||||
|
||||
type AncestorVisitors<TState> = {
|
||||
[type: string]: AncestorWalkerFn<TState>
|
||||
};
|
||||
|
||||
type RecursiveVisitors<TState> = {
|
||||
[type: string]: RecursiveWalkerFn<TState>
|
||||
};
|
||||
|
||||
type FindPredicate = (type: string, node: Node) => boolean;
|
||||
|
||||
interface Found<TState> {
|
||||
node: Node,
|
||||
state: TState
|
||||
}
|
||||
|
||||
export function simple<TState>(
|
||||
node: Node,
|
||||
visitors: SimpleVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void;
|
||||
|
||||
export function ancestor<TState>(
|
||||
node: Node,
|
||||
visitors: AncestorVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void;
|
||||
|
||||
export function recursive<TState>(
|
||||
node: Node,
|
||||
state: TState,
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): void;
|
||||
|
||||
export function full<TState>(
|
||||
node: Node,
|
||||
callback: FullWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void;
|
||||
|
||||
export function fullAncestor<TState>(
|
||||
node: Node,
|
||||
callback: FullAncestorWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void;
|
||||
|
||||
export function make<TState>(
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): RecursiveVisitors<TState>;
|
||||
|
||||
export function findNodeAt<TState>(
|
||||
node: Node,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
type: string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined;
|
||||
|
||||
export function findNodeAt<TState>(
|
||||
node: Node,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
type?: FindPredicate,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined;
|
||||
|
||||
export const findNodeAround: typeof findNodeAt;
|
||||
|
||||
export const findNodeAfter: typeof findNodeAt;
|
||||
}
|
||||
461
node_modules/ava/node_modules/acorn-walk/dist/walk.js
generated
vendored
461
node_modules/ava/node_modules/acorn-walk/dist/walk.js
generated
vendored
|
|
@ -1,461 +0,0 @@
|
|||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = global || self, factory((global.acorn = global.acorn || {}, global.acorn.walk = {})));
|
||||
}(this, function (exports) { 'use strict';
|
||||
|
||||
// AST walker module for Mozilla Parser API compatible trees
|
||||
|
||||
// A simple walk is one where you simply specify callbacks to be
|
||||
// called on specific nodes. The last two arguments are optional. A
|
||||
// simple use would be
|
||||
//
|
||||
// walk.simple(myTree, {
|
||||
// Expression: function(node) { ... }
|
||||
// });
|
||||
//
|
||||
// to do something with all expressions. All Parser API node types
|
||||
// can be used to identify node types, as well as Expression and
|
||||
// Statement, which denote categories of nodes.
|
||||
//
|
||||
// The base argument can be used to pass a custom (recursive)
|
||||
// walker, and state can be used to give this walked an initial
|
||||
// state.
|
||||
|
||||
function simple(node, visitors, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type, found = visitors[type];
|
||||
baseVisitor[type](node, st, c);
|
||||
if (found) { found(node, st); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||
// current node) and passes them to the callback as third parameter
|
||||
// (and also as state parameter when no other state is present).
|
||||
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||
var ancestors = [];
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type, found = visitors[type];
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (found) { found(node, st || ancestors, ancestors); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A recursive walk is one where your functions override the default
|
||||
// walkers. They can modify and replace the state parameter that's
|
||||
// threaded through the walk, and can opt how and whether to walk
|
||||
// their child nodes (by calling their third argument on these
|
||||
// nodes).
|
||||
function recursive(node, state, funcs, baseVisitor, override) {
|
||||
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||
;(function c(node, st, override) {
|
||||
visitor[override || node.type](node, st, c);
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
function makeTest(test) {
|
||||
if (typeof test === "string")
|
||||
{ return function (type) { return type === test; } }
|
||||
else if (!test)
|
||||
{ return function () { return true; } }
|
||||
else
|
||||
{ return test }
|
||||
}
|
||||
|
||||
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st, type); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An fullAncestor walk is like an ancestor walk, but triggers
|
||||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = []
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st || ancestors, ancestors, type); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
||||
// Find a node with a given start, end, and type (all are optional,
|
||||
// null can be used as wildcard). Returns a {node, state} object, or
|
||||
// undefined when it doesn't find a matching node.
|
||||
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
test = makeTest(test);
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if ((start == null || node.start <= start) &&
|
||||
(end == null || node.end >= end))
|
||||
{ baseVisitor[type](node, st, c); }
|
||||
if ((start == null || node.start === start) &&
|
||||
(end == null || node.end === end) &&
|
||||
test(type, node))
|
||||
{ throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the innermost node of a given type that contains the given
|
||||
// position. Interface similar to findNodeAt.
|
||||
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if (node.start > pos || node.end < pos) { return }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (test(type, node)) { throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node after a given position.
|
||||
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
if (node.end < pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node before a given position.
|
||||
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var max
|
||||
;(function c(node, st, override) {
|
||||
if (node.start > pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||
{ max = new Found(node, st); }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
return max
|
||||
}
|
||||
|
||||
// Fallback to an Object.create polyfill for older environments.
|
||||
var create = Object.create || function(proto) {
|
||||
function Ctor() {}
|
||||
Ctor.prototype = proto;
|
||||
return new Ctor
|
||||
};
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
||||
function skipThrough(node, st, c) { c(node, st); }
|
||||
function ignore(_node, _st, _c) {}
|
||||
|
||||
// Node walkers.
|
||||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
||||
c(stmt, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Statement");
|
||||
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||
};
|
||||
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||
base.BreakStatement = base.ContinueStatement = ignore;
|
||||
base.WithStatement = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.SwitchStatement = function (node, st, c) {
|
||||
c(node.discriminant, st, "Expression");
|
||||
for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) {
|
||||
var cs = list$1[i$1];
|
||||
|
||||
if (cs.test) { c(cs.test, st, "Expression"); }
|
||||
for (var i = 0, list = cs.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
}
|
||||
};
|
||||
base.SwitchCase = function (node, st, c) {
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||
};
|
||||
base.ThrowStatement = base.SpreadElement =
|
||||
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||
base.TryStatement = function (node, st, c) {
|
||||
c(node.block, st, "Statement");
|
||||
if (node.handler) { c(node.handler, st); }
|
||||
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||
};
|
||||
base.CatchClause = function (node, st, c) {
|
||||
if (node.param) { c(node.param, st, "Pattern"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForStatement = function (node, st, c) {
|
||||
if (node.init) { c(node.init, st, "ForInit"); }
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
if (node.update) { c(node.update, st, "Expression"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||
c(node.left, st, "ForInit");
|
||||
c(node.right, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInit = function (node, st, c) {
|
||||
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||
else { c(node, st, "Expression"); }
|
||||
};
|
||||
base.DebuggerStatement = ignore;
|
||||
|
||||
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||
base.VariableDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||
{
|
||||
var decl = list[i];
|
||||
|
||||
c(decl, st);
|
||||
}
|
||||
};
|
||||
base.VariableDeclarator = function (node, st, c) {
|
||||
c(node.id, st, "Pattern");
|
||||
if (node.init) { c(node.init, st, "Expression"); }
|
||||
};
|
||||
|
||||
base.Function = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||
{
|
||||
var param = list[i];
|
||||
|
||||
c(param, st, "Pattern");
|
||||
}
|
||||
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||
};
|
||||
|
||||
base.Pattern = function (node, st, c) {
|
||||
if (node.type === "Identifier")
|
||||
{ c(node, st, "VariablePattern"); }
|
||||
else if (node.type === "MemberExpression")
|
||||
{ c(node, st, "MemberPattern"); }
|
||||
else
|
||||
{ c(node, st); }
|
||||
};
|
||||
base.VariablePattern = ignore;
|
||||
base.MemberPattern = skipThrough;
|
||||
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||
base.ArrayPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Pattern"); }
|
||||
}
|
||||
};
|
||||
base.ObjectPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||
var prop = list[i];
|
||||
|
||||
if (prop.type === "Property") {
|
||||
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||
c(prop.value, st, "Pattern");
|
||||
} else if (prop.type === "RestElement") {
|
||||
c(prop.argument, st, "Pattern");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
base.Expression = skipThrough;
|
||||
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||
base.ArrayExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Expression"); }
|
||||
}
|
||||
};
|
||||
base.ObjectExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||
{
|
||||
var prop = list[i];
|
||||
|
||||
c(prop, st);
|
||||
}
|
||||
};
|
||||
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||
base.SequenceExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||
{
|
||||
var expr = list[i];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateLiteral = function (node, st, c) {
|
||||
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||
{
|
||||
var quasi = list[i];
|
||||
|
||||
c(quasi, st);
|
||||
}
|
||||
|
||||
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||
{
|
||||
var expr = list$1[i$1];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateElement = ignore;
|
||||
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||
c(node.argument, st, "Expression");
|
||||
};
|
||||
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||
c(node.left, st, "Expression");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||
c(node.left, st, "Pattern");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.ConditionalExpression = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Expression");
|
||||
c(node.alternate, st, "Expression");
|
||||
};
|
||||
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||
c(node.callee, st, "Expression");
|
||||
if (node.arguments)
|
||||
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||
{
|
||||
var arg = list[i];
|
||||
|
||||
c(arg, st, "Expression");
|
||||
} }
|
||||
};
|
||||
base.MemberExpression = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
if (node.computed) { c(node.property, st, "Expression"); }
|
||||
};
|
||||
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||
if (node.declaration)
|
||||
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||
{
|
||||
var spec = list[i];
|
||||
|
||||
c(spec, st);
|
||||
}
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
c(node.quasi, st, "Expression");
|
||||
};
|
||||
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||
base.Class = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||
c(node.body, st);
|
||||
};
|
||||
base.ClassBody = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var elt = list[i];
|
||||
|
||||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
c(node.value, st, "Expression");
|
||||
};
|
||||
|
||||
exports.ancestor = ancestor;
|
||||
exports.base = base;
|
||||
exports.findNodeAfter = findNodeAfter;
|
||||
exports.findNodeAround = findNodeAround;
|
||||
exports.findNodeAt = findNodeAt;
|
||||
exports.findNodeBefore = findNodeBefore;
|
||||
exports.full = full;
|
||||
exports.fullAncestor = fullAncestor;
|
||||
exports.make = make;
|
||||
exports.recursive = recursive;
|
||||
exports.simple = simple;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
1
node_modules/ava/node_modules/acorn-walk/dist/walk.js.map
generated
vendored
1
node_modules/ava/node_modules/acorn-walk/dist/walk.js.map
generated
vendored
File diff suppressed because one or more lines are too long
441
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs
generated
vendored
441
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs
generated
vendored
|
|
@ -1,441 +0,0 @@
|
|||
// AST walker module for Mozilla Parser API compatible trees
|
||||
|
||||
// A simple walk is one where you simply specify callbacks to be
|
||||
// called on specific nodes. The last two arguments are optional. A
|
||||
// simple use would be
|
||||
//
|
||||
// walk.simple(myTree, {
|
||||
// Expression: function(node) { ... }
|
||||
// });
|
||||
//
|
||||
// to do something with all expressions. All Parser API node types
|
||||
// can be used to identify node types, as well as Expression and
|
||||
// Statement, which denote categories of nodes.
|
||||
//
|
||||
// The base argument can be used to pass a custom (recursive)
|
||||
// walker, and state can be used to give this walked an initial
|
||||
// state.
|
||||
|
||||
function simple(node, visitors, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type, found = visitors[type];
|
||||
baseVisitor[type](node, st, c);
|
||||
if (found) { found(node, st); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||
// current node) and passes them to the callback as third parameter
|
||||
// (and also as state parameter when no other state is present).
|
||||
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||
var ancestors = [];
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type, found = visitors[type];
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (found) { found(node, st || ancestors, ancestors); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A recursive walk is one where your functions override the default
|
||||
// walkers. They can modify and replace the state parameter that's
|
||||
// threaded through the walk, and can opt how and whether to walk
|
||||
// their child nodes (by calling their third argument on these
|
||||
// nodes).
|
||||
function recursive(node, state, funcs, baseVisitor, override) {
|
||||
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||
;(function c(node, st, override) {
|
||||
visitor[override || node.type](node, st, c);
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
function makeTest(test) {
|
||||
if (typeof test === "string")
|
||||
{ return function (type) { return type === test; } }
|
||||
else if (!test)
|
||||
{ return function () { return true; } }
|
||||
else
|
||||
{ return test }
|
||||
}
|
||||
|
||||
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st, type); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An fullAncestor walk is like an ancestor walk, but triggers
|
||||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = []
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st || ancestors, ancestors, type); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
||||
// Find a node with a given start, end, and type (all are optional,
|
||||
// null can be used as wildcard). Returns a {node, state} object, or
|
||||
// undefined when it doesn't find a matching node.
|
||||
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
test = makeTest(test);
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if ((start == null || node.start <= start) &&
|
||||
(end == null || node.end >= end))
|
||||
{ baseVisitor[type](node, st, c); }
|
||||
if ((start == null || node.start === start) &&
|
||||
(end == null || node.end === end) &&
|
||||
test(type, node))
|
||||
{ throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the innermost node of a given type that contains the given
|
||||
// position. Interface similar to findNodeAt.
|
||||
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if (node.start > pos || node.end < pos) { return }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (test(type, node)) { throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node after a given position.
|
||||
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
if (node.end < pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node before a given position.
|
||||
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var max
|
||||
;(function c(node, st, override) {
|
||||
if (node.start > pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||
{ max = new Found(node, st); }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
return max
|
||||
}
|
||||
|
||||
// Fallback to an Object.create polyfill for older environments.
|
||||
var create = Object.create || function(proto) {
|
||||
function Ctor() {}
|
||||
Ctor.prototype = proto;
|
||||
return new Ctor
|
||||
};
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
||||
function skipThrough(node, st, c) { c(node, st); }
|
||||
function ignore(_node, _st, _c) {}
|
||||
|
||||
// Node walkers.
|
||||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
||||
c(stmt, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Statement");
|
||||
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||
};
|
||||
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||
base.BreakStatement = base.ContinueStatement = ignore;
|
||||
base.WithStatement = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.SwitchStatement = function (node, st, c) {
|
||||
c(node.discriminant, st, "Expression");
|
||||
for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) {
|
||||
var cs = list$1[i$1];
|
||||
|
||||
if (cs.test) { c(cs.test, st, "Expression"); }
|
||||
for (var i = 0, list = cs.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
}
|
||||
};
|
||||
base.SwitchCase = function (node, st, c) {
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||
};
|
||||
base.ThrowStatement = base.SpreadElement =
|
||||
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||
base.TryStatement = function (node, st, c) {
|
||||
c(node.block, st, "Statement");
|
||||
if (node.handler) { c(node.handler, st); }
|
||||
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||
};
|
||||
base.CatchClause = function (node, st, c) {
|
||||
if (node.param) { c(node.param, st, "Pattern"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForStatement = function (node, st, c) {
|
||||
if (node.init) { c(node.init, st, "ForInit"); }
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
if (node.update) { c(node.update, st, "Expression"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||
c(node.left, st, "ForInit");
|
||||
c(node.right, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInit = function (node, st, c) {
|
||||
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||
else { c(node, st, "Expression"); }
|
||||
};
|
||||
base.DebuggerStatement = ignore;
|
||||
|
||||
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||
base.VariableDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||
{
|
||||
var decl = list[i];
|
||||
|
||||
c(decl, st);
|
||||
}
|
||||
};
|
||||
base.VariableDeclarator = function (node, st, c) {
|
||||
c(node.id, st, "Pattern");
|
||||
if (node.init) { c(node.init, st, "Expression"); }
|
||||
};
|
||||
|
||||
base.Function = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||
{
|
||||
var param = list[i];
|
||||
|
||||
c(param, st, "Pattern");
|
||||
}
|
||||
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||
};
|
||||
|
||||
base.Pattern = function (node, st, c) {
|
||||
if (node.type === "Identifier")
|
||||
{ c(node, st, "VariablePattern"); }
|
||||
else if (node.type === "MemberExpression")
|
||||
{ c(node, st, "MemberPattern"); }
|
||||
else
|
||||
{ c(node, st); }
|
||||
};
|
||||
base.VariablePattern = ignore;
|
||||
base.MemberPattern = skipThrough;
|
||||
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||
base.ArrayPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Pattern"); }
|
||||
}
|
||||
};
|
||||
base.ObjectPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||
var prop = list[i];
|
||||
|
||||
if (prop.type === "Property") {
|
||||
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||
c(prop.value, st, "Pattern");
|
||||
} else if (prop.type === "RestElement") {
|
||||
c(prop.argument, st, "Pattern");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
base.Expression = skipThrough;
|
||||
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||
base.ArrayExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Expression"); }
|
||||
}
|
||||
};
|
||||
base.ObjectExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||
{
|
||||
var prop = list[i];
|
||||
|
||||
c(prop, st);
|
||||
}
|
||||
};
|
||||
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||
base.SequenceExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||
{
|
||||
var expr = list[i];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateLiteral = function (node, st, c) {
|
||||
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||
{
|
||||
var quasi = list[i];
|
||||
|
||||
c(quasi, st);
|
||||
}
|
||||
|
||||
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||
{
|
||||
var expr = list$1[i$1];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateElement = ignore;
|
||||
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||
c(node.argument, st, "Expression");
|
||||
};
|
||||
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||
c(node.left, st, "Expression");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||
c(node.left, st, "Pattern");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.ConditionalExpression = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Expression");
|
||||
c(node.alternate, st, "Expression");
|
||||
};
|
||||
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||
c(node.callee, st, "Expression");
|
||||
if (node.arguments)
|
||||
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||
{
|
||||
var arg = list[i];
|
||||
|
||||
c(arg, st, "Expression");
|
||||
} }
|
||||
};
|
||||
base.MemberExpression = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
if (node.computed) { c(node.property, st, "Expression"); }
|
||||
};
|
||||
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||
if (node.declaration)
|
||||
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||
{
|
||||
var spec = list[i];
|
||||
|
||||
c(spec, st);
|
||||
}
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
c(node.quasi, st, "Expression");
|
||||
};
|
||||
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||
base.Class = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||
c(node.body, st);
|
||||
};
|
||||
base.ClassBody = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var elt = list[i];
|
||||
|
||||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
c(node.value, st, "Expression");
|
||||
};
|
||||
|
||||
export { ancestor, base, findNodeAfter, findNodeAround, findNodeAt, findNodeBefore, full, fullAncestor, make, recursive, simple };
|
||||
1
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs.map
generated
vendored
1
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
764
node_modules/ava/node_modules/acorn/CHANGELOG.md
generated
vendored
Normal file
764
node_modules/ava/node_modules/acorn/CHANGELOG.md
generated
vendored
Normal file
|
|
@ -0,0 +1,764 @@
|
|||
## 8.5.0 (2021-09-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve context-dependent tokenization in a number of corner cases.
|
||||
|
||||
Fix location tracking after a 0x2028 or 0x2029 character in a string literal (which before did not increase the line number).
|
||||
|
||||
Fix an issue where arrow function bodies in for loop context would inappropriately consume `in` operators.
|
||||
|
||||
Fix wrong end locations stored on SequenceExpression nodes.
|
||||
|
||||
Implement restriction that `for`/`of` loop LHS can't start with `let`.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class static blocks.
|
||||
|
||||
Allow multiple input files to be passed to the CLI tool.
|
||||
|
||||
## 8.4.1 (2021-06-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where `allowAwaitOutsideFunction` would allow `await` in class field initializers, and setting `ecmaVersion` to 13 or higher would allow top-level await in non-module sources.
|
||||
|
||||
## 8.4.0 (2021-06-11)
|
||||
|
||||
### New features
|
||||
|
||||
A new option, `allowSuperOutsideMethod`, can be used to suppress the error when `super` is used in the wrong context.
|
||||
|
||||
## 8.3.0 (2021-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 an higher.
|
||||
|
||||
Add support for the `p` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
|
||||
|
||||
## 8.2.4 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spec conformity in corner case 'for await (async of ...)'.
|
||||
|
||||
## 8.2.3 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix an issue where the library couldn't parse 'for (async of ...)'.
|
||||
|
||||
Fix a bug in UTF-16 decoding that would read characters incorrectly in some circumstances.
|
||||
|
||||
## 8.2.2 (2021-04-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where a class field initialized to an async arrow function wouldn't allow await inside it. Same issue existed for generator arrow functions with yield.
|
||||
|
||||
## 8.2.1 (2021-04-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a regression introduced in 8.2.0 where static or async class methods with keyword names fail to parse.
|
||||
|
||||
## 8.2.0 (2021-04-24)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class fields and private methods.
|
||||
|
||||
## 8.1.1 (2021-04-12)
|
||||
|
||||
### Various
|
||||
|
||||
Stop shipping source maps in the NPM package.
|
||||
|
||||
## 8.1.0 (2021-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a spurious error in nested destructuring arrays.
|
||||
|
||||
### New features
|
||||
|
||||
Expose `allowAwaitOutsideFunction` in CLI interface.
|
||||
|
||||
Make `allowImportExportAnywhere` also apply to `import.meta`.
|
||||
|
||||
## 8.0.5 (2021-01-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||
|
||||
## 8.0.4 (2020-10-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make `await x ** y` an error, following the spec.
|
||||
|
||||
Fix potentially exponential regular expression.
|
||||
|
||||
## 8.0.3 (2020-10-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a wasteful loop during `Parser` creation when setting `ecmaVersion` to `"latest"`.
|
||||
|
||||
## 8.0.2 (2020-09-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the TypeScript types reflect the current allowed values for `ecmaVersion`.
|
||||
|
||||
Fix another regexp/division tokenizer issue.
|
||||
|
||||
## 8.0.1 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Provide the correct value in the `version` export.
|
||||
|
||||
## 8.0.0 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow expressions like `(a = b) = c`.
|
||||
|
||||
Make non-octal escape sequences a syntax error in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||
|
||||
Update to the set of Unicode properties from ES2021.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The `ecmaVersion` option is now required. For the moment, omitting it will still work with a warning, but that will change in a future release.
|
||||
|
||||
Some changes to method signatures that may be used by plugins.
|
||||
|
||||
## 7.4.0 (2020-08-03)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for logical assignment operators.
|
||||
|
||||
Add support for numeric separators.
|
||||
|
||||
## 7.3.1 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the string in the `version` export match the actual library version.
|
||||
|
||||
## 7.3.0 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused parsing of object patterns with a property named `set` that had a default value to fail.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for optional chaining (`?.`).
|
||||
|
||||
## 7.2.0 (2020-05-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix precedence issue in parsing of async arrow functions.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for nullish coalescing.
|
||||
|
||||
Add support for `import.meta`.
|
||||
|
||||
Support `export * as ...` syntax.
|
||||
|
||||
Upgrade to Unicode 13.
|
||||
|
||||
## 6.4.1 (2020-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.1 (2020-03-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Treat `\8` and `\9` as invalid escapes in template strings.
|
||||
|
||||
Allow unicode escapes in property names that are keywords.
|
||||
|
||||
Don't error on an exponential operator expression as argument to `await`.
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.0 (2019-09-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow trailing object literal commas when ecmaVersion is less than 5.
|
||||
|
||||
### New features
|
||||
|
||||
Add a static `acorn` property to the `Parser` class that contains the entire module interface, to allow plugins to access the instance of the library that they are acting on.
|
||||
|
||||
## 7.0.0 (2019-08-13)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Changes the node format for dynamic imports to use the `ImportExpression` node type, as defined in [ESTree](https://github.com/estree/estree/blob/master/es2020.md#importexpression).
|
||||
|
||||
Makes 10 (ES2019) the default value for the `ecmaVersion` option.
|
||||
|
||||
## 6.3.0 (2019-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
`sourceType: "module"` can now be used even when `ecmaVersion` is less than 6, to parse module-style code that otherwise conforms to an older standard.
|
||||
|
||||
## 6.2.1 (2019-07-21)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug causing Acorn to treat some characters as identifier characters that shouldn't be treated as such.
|
||||
|
||||
Fix issue where setting the `allowReserved` option to `"never"` allowed reserved words in some circumstances.
|
||||
|
||||
## 6.2.0 (2019-07-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve valid assignment checking in `for`/`in` and `for`/`of` loops.
|
||||
|
||||
Disallow binding `let` in patterns.
|
||||
|
||||
### New features
|
||||
|
||||
Support bigint syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Support dynamic `import` syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Upgrade to Unicode version 12.
|
||||
|
||||
## 6.1.1 (2019-02-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug that caused parsing default exports of with names to fail.
|
||||
|
||||
## 6.1.0 (2019-02-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix scope checking when redefining a `var` as a lexical binding.
|
||||
|
||||
### New features
|
||||
|
||||
Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins.
|
||||
|
||||
## 6.0.7 (2019-02-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Check that exported bindings are defined.
|
||||
|
||||
Don't treat `\u180e` as a whitespace character.
|
||||
|
||||
Check for duplicate parameter names in methods.
|
||||
|
||||
Don't allow shorthand properties when they are generators or async methods.
|
||||
|
||||
Forbid binding `await` in async arrow function's parameter list.
|
||||
|
||||
## 6.0.6 (2019-01-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The content of class declarations and expressions is now always parsed in strict mode.
|
||||
|
||||
Don't allow `let` or `const` to bind the variable name `let`.
|
||||
|
||||
Treat class declarations as lexical.
|
||||
|
||||
Don't allow a generator function declaration as the sole body of an `if` or `else`.
|
||||
|
||||
Ignore `"use strict"` when after an empty statement.
|
||||
|
||||
Allow string line continuations with special line terminator characters.
|
||||
|
||||
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings.
|
||||
|
||||
Fix bug with parsing `yield` in a `for` loop initializer.
|
||||
|
||||
Implement special cases around scope checking for functions.
|
||||
|
||||
## 6.0.5 (2019-01-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type.
|
||||
|
||||
Don't treat `let` as a keyword when the next token is `{` on the next line.
|
||||
|
||||
Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on.
|
||||
|
||||
## 6.0.4 (2018-11-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Further improvements to tokenizing regular expressions in corner cases.
|
||||
|
||||
## 6.0.3 (2018-11-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression.
|
||||
|
||||
Remove stray symlink in the package tarball.
|
||||
|
||||
## 6.0.2 (2018-09-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug where default expressions could fail to parse inside an object destructuring assignment expression.
|
||||
|
||||
## 6.0.1 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix wrong value in `version` export.
|
||||
|
||||
## 6.0.0 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Better handle variable-redefinition checks for catch bindings and functions directly under if statements.
|
||||
|
||||
Forbid `new.target` in top-level arrow functions.
|
||||
|
||||
Fix issue with parsing a regexp after `yield` in some contexts.
|
||||
|
||||
### New features
|
||||
|
||||
The package now comes with TypeScript definitions.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 9 (2018).
|
||||
|
||||
Plugins work differently, and will have to be rewritten to work with this version.
|
||||
|
||||
The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`).
|
||||
|
||||
## 5.7.3 (2018-09-10)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix failure to tokenize regexps after expressions like `x.of`.
|
||||
|
||||
Better error message for unterminated template literals.
|
||||
|
||||
## 5.7.2 (2018-08-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly handle `allowAwaitOutsideFunction` in for statements.
|
||||
|
||||
Treat function declarations at the top level of modules like let bindings.
|
||||
|
||||
Don't allow async function declarations as the only statement under a label.
|
||||
|
||||
## 5.7.0 (2018-06-15)
|
||||
|
||||
### New features
|
||||
|
||||
Upgraded to Unicode 11.
|
||||
|
||||
## 5.6.0 (2018-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Allow U+2028 and U+2029 in string when ECMAVersion >= 10.
|
||||
|
||||
Allow binding-less catch statements when ECMAVersion >= 10.
|
||||
|
||||
Add `allowAwaitOutsideFunction` option for parsing top-level `await`.
|
||||
|
||||
## 5.5.3 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps.
|
||||
|
||||
## 5.5.2 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0.
|
||||
|
||||
## 5.5.1 (2018-03-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix misleading error message for octal escapes in template strings.
|
||||
|
||||
## 5.5.0 (2018-02-27)
|
||||
|
||||
### New features
|
||||
|
||||
The identifier character categorization is now based on Unicode version 10.
|
||||
|
||||
Acorn will now validate the content of regular expressions, including new ES9 features.
|
||||
|
||||
## 5.4.0 (2018-02-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow duplicate or escaped flags on regular expressions.
|
||||
|
||||
Disallow octal escapes in strings in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for async iteration.
|
||||
|
||||
Add support for object spread and rest.
|
||||
|
||||
## 5.3.0 (2017-12-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix parsing of floating point literals with leading zeroes in loose mode.
|
||||
|
||||
Allow duplicate property names in object patterns.
|
||||
|
||||
Don't allow static class methods named `prototype`.
|
||||
|
||||
Disallow async functions directly under `if` or `else`.
|
||||
|
||||
Parse right-hand-side of `for`/`of` as an assignment expression.
|
||||
|
||||
Stricter parsing of `for`/`in`.
|
||||
|
||||
Don't allow unicode escapes in contextual keywords.
|
||||
|
||||
### New features
|
||||
|
||||
Parsing class members was factored into smaller methods to allow plugins to hook into it.
|
||||
|
||||
## 5.2.1 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a token context corruption bug.
|
||||
|
||||
## 5.2.0 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix token context tracking for `class` and `function` in property-name position.
|
||||
|
||||
Make sure `%*` isn't parsed as a valid operator.
|
||||
|
||||
Allow shorthand properties `get` and `set` to be followed by default values.
|
||||
|
||||
Disallow `super` when not in callee or object position.
|
||||
|
||||
### New features
|
||||
|
||||
Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements.
|
||||
|
||||
## 5.1.2 (2017-09-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disable parsing of legacy HTML-style comments in modules.
|
||||
|
||||
Fix parsing of async methods whose names are keywords.
|
||||
|
||||
## 5.1.1 (2017-07-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix problem with disambiguating regexp and division after a class.
|
||||
|
||||
## 5.1.0 (2017-07-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix tokenizing of regexps in an object-destructuring `for`/`of` loop and after `yield`.
|
||||
|
||||
Parse zero-prefixed numbers with non-octal digits as decimal.
|
||||
|
||||
Allow object/array patterns in rest parameters.
|
||||
|
||||
Don't error when `yield` is used as a property name.
|
||||
|
||||
Allow `async` as a shorthand object property.
|
||||
|
||||
### New features
|
||||
|
||||
Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9.
|
||||
|
||||
## 5.0.3 (2017-04-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spurious duplicate variable definition errors for named functions.
|
||||
|
||||
## 5.0.2 (2017-03-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error.
|
||||
|
||||
## 5.0.0 (2017-03-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Raise an error for duplicated lexical bindings.
|
||||
|
||||
Fix spurious error when an assignment expression occurred after a spread expression.
|
||||
|
||||
Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions.
|
||||
|
||||
Allow labels in front of `var` declarations, even in strict mode.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declarations nodes can now have `null` as their `id`.
|
||||
|
||||
## 4.0.11 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow all forms of member expressions to be parenthesized as lvalue.
|
||||
|
||||
## 4.0.10 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't expect semicolons after default-exported functions or classes, even when they are expressions.
|
||||
|
||||
Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode.
|
||||
|
||||
## 4.0.9 (2017-02-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again.
|
||||
|
||||
## 4.0.8 (2017-02-03)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet.
|
||||
|
||||
## 4.0.7 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Accept invalidly rejected code like `(x).y = 2` again.
|
||||
|
||||
Don't raise an error when a function _inside_ strict code has a non-simple parameter list.
|
||||
|
||||
## 4.0.6 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check.
|
||||
|
||||
## 4.0.5 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow parenthesized pattern expressions.
|
||||
|
||||
Allow keywords as export names.
|
||||
|
||||
Don't allow the `async` keyword to be parenthesized.
|
||||
|
||||
Properly raise an error when a keyword contains a character escape.
|
||||
|
||||
Allow `"use strict"` to appear after other string literal expressions.
|
||||
|
||||
Disallow labeled declarations.
|
||||
|
||||
## 4.0.4 (2016-12-19)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix crash when `export` was followed by a keyword that can't be
|
||||
exported.
|
||||
|
||||
## 4.0.3 (2016-08-16)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode.
|
||||
|
||||
Properly parse properties named `async` in ES2017 mode.
|
||||
|
||||
Fix bug where reserved words were broken in ES2017 mode.
|
||||
|
||||
## 4.0.2 (2016-08-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't ignore period or 'e' characters after octal numbers.
|
||||
|
||||
Fix broken parsing for call expressions in default parameter values of arrow functions.
|
||||
|
||||
## 4.0.1 (2016-08-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix false positives in duplicated export name errors.
|
||||
|
||||
## 4.0.0 (2016-08-07)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default `ecmaVersion` option value is now 7.
|
||||
|
||||
A number of internal method signatures changed, so plugins might need to be updated.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The parser now raises errors on duplicated export names.
|
||||
|
||||
`arguments` and `eval` can now be used in shorthand properties.
|
||||
|
||||
Duplicate parameter names in non-simple argument lists now always produce an error.
|
||||
|
||||
### New features
|
||||
|
||||
The `ecmaVersion` option now also accepts year-style version numbers
|
||||
(2015, etc).
|
||||
|
||||
Support for `async`/`await` syntax when `ecmaVersion` is >= 8.
|
||||
|
||||
Support for trailing commas in call expressions when `ecmaVersion` is >= 8.
|
||||
|
||||
## 3.3.0 (2016-07-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing of regexp operator after a function declaration.
|
||||
|
||||
Fix parser crash when parsing an array pattern with a hole.
|
||||
|
||||
### New features
|
||||
|
||||
Implement check against complex argument lists in functions that enable strict mode in ES7.
|
||||
|
||||
## 3.2.0 (2016-06-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve handling of lack of unicode regexp support in host
|
||||
environment.
|
||||
|
||||
Properly reject shorthand properties whose name is a keyword.
|
||||
|
||||
### New features
|
||||
|
||||
Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object.
|
||||
|
||||
## 3.1.0 (2016-04-18)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly tokenize the division operator directly after a function expression.
|
||||
|
||||
Allow trailing comma in destructuring arrays.
|
||||
|
||||
## 3.0.4 (2016-02-25)
|
||||
|
||||
### Fixes
|
||||
|
||||
Allow update expressions as left-hand-side of the ES7 exponential operator.
|
||||
|
||||
## 3.0.2 (2016-02-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix bug that accidentally made `undefined` a reserved word when parsing ES7.
|
||||
|
||||
## 3.0.0 (2016-02-10)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 6 (used to be 5).
|
||||
|
||||
Support for comprehension syntax (which was dropped from the draft spec) has been removed.
|
||||
|
||||
### Fixes
|
||||
|
||||
`let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code.
|
||||
|
||||
A parenthesized class or function expression after `export default` is now parsed correctly.
|
||||
|
||||
### New features
|
||||
|
||||
When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`).
|
||||
|
||||
The identifier character ranges are now based on Unicode 8.0.0.
|
||||
|
||||
Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled.
|
||||
|
||||
## 2.7.0 (2016-01-04)
|
||||
|
||||
### Fixes
|
||||
|
||||
Stop allowing rest parameters in setters.
|
||||
|
||||
Disallow `y` regexp flag in ES5.
|
||||
|
||||
Disallow `\00` and `\000` escapes in strict mode.
|
||||
|
||||
Raise an error when an import name is a reserved word.
|
||||
|
||||
## 2.6.2 (2015-11-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Don't crash when no options object is passed.
|
||||
|
||||
## 2.6.0 (2015-11-09)
|
||||
|
||||
### Fixes
|
||||
|
||||
Add `await` as a reserved word in module sources.
|
||||
|
||||
Disallow `yield` in a parameter default value for a generator.
|
||||
|
||||
Forbid using a comma after a rest pattern in an array destructuring.
|
||||
|
||||
### New features
|
||||
|
||||
Support parsing stdin in command-line tool.
|
||||
|
||||
## 2.5.0 (2015-10-27)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix tokenizer support in the command-line tool.
|
||||
|
||||
Stop allowing `new.target` outside of functions.
|
||||
|
||||
Remove legacy `guard` and `guardedHandler` properties from try nodes.
|
||||
|
||||
Stop allowing multiple `__proto__` properties on an object literal in strict mode.
|
||||
|
||||
Don't allow rest parameters to be non-identifier patterns.
|
||||
|
||||
Check for duplicate parameter names in arrow functions.
|
||||
|
|
@ -1,4 +1,6 @@
|
|||
Copyright (C) 2012-2018 by various contributors (see AUTHORS)
|
||||
MIT License
|
||||
|
||||
Copyright (C) 2012-2020 by various contributors (see AUTHORS)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
280
node_modules/ava/node_modules/acorn/README.md
generated
vendored
Normal file
280
node_modules/ava/node_modules/acorn/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
# Acorn
|
||||
|
||||
A tiny, fast JavaScript parser written in JavaScript.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn). For questions
|
||||
and discussion, please use the
|
||||
[Tern discussion forum](https://discuss.ternjs.net).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
**parse**`(input, options)` is the main interface to the library. The
|
||||
`input` parameter is a string, `options` must be an object setting
|
||||
some of the options listed below. The return value will be an abstract
|
||||
syntax tree object as specified by the [ESTree
|
||||
spec](https://github.com/estree/estree).
|
||||
|
||||
```javascript
|
||||
let acorn = require("acorn");
|
||||
console.log(acorn.parse("1 + 1", {ecmaVersion: 2020}));
|
||||
```
|
||||
|
||||
When encountering a syntax error, the parser will raise a
|
||||
`SyntaxError` object with a meaningful message. The error object will
|
||||
have a `pos` property that indicates the string offset at which the
|
||||
error occurred, and a `loc` object that contains a `{line, column}`
|
||||
object referring to that same position.
|
||||
|
||||
Options are provided in a second argument, which should be an
|
||||
object containing any of these fields (only `ecmaVersion` is
|
||||
required):
|
||||
|
||||
- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be
|
||||
either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10 (2019),
|
||||
11 (2020), 12 (2021), 13 (2022, partial support)
|
||||
or `"latest"` (the latest the library supports). This influences
|
||||
support for strict mode, the set of reserved words, and support
|
||||
for new syntax features.
|
||||
|
||||
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
|
||||
implemented by Acorn. Other proposed new features must be
|
||||
implemented through plugins.
|
||||
|
||||
- **sourceType**: Indicate the mode the code should be parsed in. Can be
|
||||
either `"script"` or `"module"`. This influences global strict mode
|
||||
and parsing of `import` and `export` declarations.
|
||||
|
||||
**NOTE**: If set to `"module"`, then static `import` / `export` syntax
|
||||
will be valid, even if `ecmaVersion` is less than 6.
|
||||
|
||||
- **onInsertedSemicolon**: If given a callback, that callback will be
|
||||
called whenever a missing semicolon is inserted by the parser. The
|
||||
callback will be given the character offset of the point where the
|
||||
semicolon is inserted as argument, and if `locations` is on, also a
|
||||
`{line, column}` object representing this position.
|
||||
|
||||
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
|
||||
commas.
|
||||
|
||||
- **allowReserved**: If `false`, using a reserved word will generate
|
||||
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
|
||||
versions. When given the value `"never"`, reserved words and
|
||||
keywords can also not be used as property names (as in Internet
|
||||
Explorer's old parser).
|
||||
|
||||
- **allowReturnOutsideFunction**: By default, a return statement at
|
||||
the top level raises an error. Set this to `true` to accept such
|
||||
code.
|
||||
|
||||
- **allowImportExportEverywhere**: By default, `import` and `export`
|
||||
declarations can only appear at a program's top level. Setting this
|
||||
option to `true` allows them anywhere where a statement is allowed,
|
||||
and also allows `import.meta` expressions to appear in scripts
|
||||
(when `sourceType` is not `"module"`).
|
||||
|
||||
- **allowAwaitOutsideFunction**: If `false`, `await` expressions can
|
||||
only appear inside `async` functions. Defaults to `true` for
|
||||
`ecmaVersion` 2022 and later, `false` for lower versions. Setting this option to
|
||||
`true` allows to have top-level `await` expressions. They are
|
||||
still not allowed in non-`async` functions, though.
|
||||
|
||||
- **allowSuperOutsideMethod**: By default, `super` outside a method
|
||||
raises an error. Set this to `true` to accept such code.
|
||||
|
||||
- **allowHashBang**: When this is enabled (off by default), if the
|
||||
code starts with the characters `#!` (as in a shellscript), the
|
||||
first line will be treated as a comment.
|
||||
|
||||
- **locations**: When `true`, each node has a `loc` object attached
|
||||
with `start` and `end` subobjects, each of which contains the
|
||||
one-based line and zero-based column numbers in `{line, column}`
|
||||
form. Default is `false`.
|
||||
|
||||
- **onToken**: If a function is passed for this option, each found
|
||||
token will be passed in same format as tokens returned from
|
||||
`tokenizer().getToken()`.
|
||||
|
||||
If array is passed, each found token is pushed to it.
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **onComment**: If a function is passed for this option, whenever a
|
||||
comment is encountered the function will be called with the
|
||||
following parameters:
|
||||
|
||||
- `block`: `true` if the comment is a block comment, false if it
|
||||
is a line comment.
|
||||
- `text`: The content of the comment.
|
||||
- `start`: Character offset of the start of the comment.
|
||||
- `end`: Character offset of the end of the comment.
|
||||
|
||||
When the `locations` options is on, the `{line, column}` locations
|
||||
of the comment’s start and end are passed as two additional
|
||||
parameters.
|
||||
|
||||
If array is passed for this option, each found comment is pushed
|
||||
to it as object in Esprima format:
|
||||
|
||||
```javascript
|
||||
{
|
||||
"type": "Line" | "Block",
|
||||
"value": "comment text",
|
||||
"start": Number,
|
||||
"end": Number,
|
||||
// If `locations` option is on:
|
||||
"loc": {
|
||||
"start": {line: Number, column: Number}
|
||||
"end": {line: Number, column: Number}
|
||||
},
|
||||
// If `ranges` option is on:
|
||||
"range": [Number, Number]
|
||||
}
|
||||
```
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **ranges**: Nodes have their start and end characters offsets
|
||||
recorded in `start` and `end` properties (directly on the node,
|
||||
rather than the `loc` object, which holds line/column data). To also
|
||||
add a
|
||||
[semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
|
||||
`range` property holding a `[start, end]` array with the same
|
||||
numbers, set the `ranges` option to `true`.
|
||||
|
||||
- **program**: It is possible to parse multiple files into a single
|
||||
AST by passing the tree produced by parsing the first file as the
|
||||
`program` option in subsequent parses. This will add the toplevel
|
||||
forms of the parsed file to the "Program" (top) node of an existing
|
||||
parse tree.
|
||||
|
||||
- **sourceFile**: When the `locations` option is `true`, you can pass
|
||||
this option to add a `source` attribute in every node’s `loc`
|
||||
object. Note that the contents of this option are not examined or
|
||||
processed in any way; you are free to use whatever format you
|
||||
choose.
|
||||
|
||||
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
|
||||
will be added (regardless of the `location` option) directly to the
|
||||
nodes, rather than the `loc` object.
|
||||
|
||||
- **preserveParens**: If this option is `true`, parenthesized expressions
|
||||
are represented by (non-standard) `ParenthesizedExpression` nodes
|
||||
that have a single `expression` property containing the expression
|
||||
inside parentheses.
|
||||
|
||||
**parseExpressionAt**`(input, offset, options)` will parse a single
|
||||
expression in a string, and return its AST. It will not complain if
|
||||
there is more of the string left after the expression.
|
||||
|
||||
**tokenizer**`(input, options)` returns an object with a `getToken`
|
||||
method that can be called repeatedly to get the next token, a `{start,
|
||||
end, type, value}` object (with added `loc` property when the
|
||||
`locations` option is enabled and `range` property when the `ranges`
|
||||
option is enabled). When the token's type is `tokTypes.eof`, you
|
||||
should stop calling the method, since it will keep returning that same
|
||||
token forever.
|
||||
|
||||
In ES6 environment, returned result can be used as any other
|
||||
protocol-compliant iterable:
|
||||
|
||||
```javascript
|
||||
for (let token of acorn.tokenizer(str)) {
|
||||
// iterate over the tokens
|
||||
}
|
||||
|
||||
// transform code to array of tokens:
|
||||
var tokens = [...acorn.tokenizer(str)];
|
||||
```
|
||||
|
||||
**tokTypes** holds an object mapping names to the token type objects
|
||||
that end up in the `type` properties of tokens.
|
||||
|
||||
**getLineInfo**`(input, offset)` can be used to get a `{line,
|
||||
column}` object for a given program string and offset.
|
||||
|
||||
### The `Parser` class
|
||||
|
||||
Instances of the **`Parser`** class contain all the state and logic
|
||||
that drives a parse. It has static methods `parse`,
|
||||
`parseExpressionAt`, and `tokenizer` that match the top-level
|
||||
functions by the same name.
|
||||
|
||||
When extending the parser with plugins, you need to call these methods
|
||||
on the extended version of the class. To extend a parser with plugins,
|
||||
you can use its static `extend` method.
|
||||
|
||||
```javascript
|
||||
var acorn = require("acorn");
|
||||
var jsx = require("acorn-jsx");
|
||||
var JSXParser = acorn.Parser.extend(jsx());
|
||||
JSXParser.parse("foo(<bar/>)", {ecmaVersion: 2020});
|
||||
```
|
||||
|
||||
The `extend` method takes any number of plugin values, and returns a
|
||||
new `Parser` class that includes the extra parser logic provided by
|
||||
the plugins.
|
||||
|
||||
## Command line interface
|
||||
|
||||
The `bin/acorn` utility can be used to parse a file from the command
|
||||
line. It accepts as arguments its input file and the following
|
||||
options:
|
||||
|
||||
- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
|
||||
to parse. Default is version 9.
|
||||
|
||||
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
|
||||
|
||||
- `--locations`: Attaches a "loc" object to each node with "start" and
|
||||
"end" subobjects, each of which contains the one-based line and
|
||||
zero-based column numbers in `{line, column}` form.
|
||||
|
||||
- `--allow-hash-bang`: If the code starts with the characters #! (as
|
||||
in a shellscript), the first line will be treated as a comment.
|
||||
|
||||
- `--allow-await-outside-function`: Allows top-level `await` expressions.
|
||||
See the `allowAwaitOutsideFunction` option for more information.
|
||||
|
||||
- `--compact`: No whitespace is used in the AST output.
|
||||
|
||||
- `--silent`: Do not output the AST, just return the exit status.
|
||||
|
||||
- `--help`: Print the usage information and quit.
|
||||
|
||||
The utility spits out the syntax tree as JSON data.
|
||||
|
||||
## Existing plugins
|
||||
|
||||
- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)
|
||||
|
||||
Plugins for ECMAScript proposals:
|
||||
|
||||
- [`acorn-stage3`](https://github.com/acornjs/acorn-stage3): Parse most stage 3 proposals, bundling:
|
||||
- [`acorn-class-fields`](https://github.com/acornjs/acorn-class-fields): Parse [class fields proposal](https://github.com/tc39/proposal-class-fields)
|
||||
- [`acorn-import-meta`](https://github.com/acornjs/acorn-import-meta): Parse [import.meta proposal](https://github.com/tc39/proposal-import-meta)
|
||||
- [`acorn-private-methods`](https://github.com/acornjs/acorn-private-methods): parse [private methods, getters and setters proposal](https://github.com/tc39/proposal-private-methods)
|
||||
4
node_modules/ava/node_modules/acorn/bin/acorn
generated
vendored
Executable file
4
node_modules/ava/node_modules/acorn/bin/acorn
generated
vendored
Executable file
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
require('../dist/bin.js');
|
||||
214
node_modules/ava/node_modules/acorn/dist/acorn.d.ts
generated
vendored
Normal file
214
node_modules/ava/node_modules/acorn/dist/acorn.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,214 @@
|
|||
export as namespace acorn
|
||||
export = acorn
|
||||
|
||||
declare namespace acorn {
|
||||
function parse(input: string, options: Options): Node
|
||||
|
||||
function parseExpressionAt(input: string, pos: number, options: Options): Node
|
||||
|
||||
function tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
|
||||
interface Options {
|
||||
ecmaVersion: 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 'latest'
|
||||
sourceType?: 'script' | 'module'
|
||||
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
allowReserved?: boolean | 'never'
|
||||
allowReturnOutsideFunction?: boolean
|
||||
allowImportExportEverywhere?: boolean
|
||||
allowAwaitOutsideFunction?: boolean
|
||||
allowSuperOutsideMethod?: boolean
|
||||
allowHashBang?: boolean
|
||||
locations?: boolean
|
||||
onToken?: ((token: Token) => any) | Token[]
|
||||
onComment?: ((
|
||||
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
|
||||
endLoc?: Position
|
||||
) => void) | Comment[]
|
||||
ranges?: boolean
|
||||
program?: Node
|
||||
sourceFile?: string
|
||||
directSourceFile?: string
|
||||
preserveParens?: boolean
|
||||
}
|
||||
|
||||
class Parser {
|
||||
constructor(options: Options, input: string, startPos?: number)
|
||||
parse(this: Parser): Node
|
||||
static parse(this: typeof Parser, input: string, options: Options): Node
|
||||
static parseExpressionAt(this: typeof Parser, input: string, pos: number, options: Options): Node
|
||||
static tokenizer(this: typeof Parser, input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
static extend(this: typeof Parser, ...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
|
||||
}
|
||||
|
||||
interface Position { line: number; column: number; offset: number }
|
||||
|
||||
const defaultOptions: Options
|
||||
|
||||
function getLineInfo(input: string, offset: number): Position
|
||||
|
||||
class SourceLocation {
|
||||
start: Position
|
||||
end: Position
|
||||
source?: string | null
|
||||
constructor(p: Parser, start: Position, end: Position)
|
||||
}
|
||||
|
||||
class Node {
|
||||
type: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
sourceFile?: string
|
||||
range?: [number, number]
|
||||
constructor(parser: Parser, pos: number, loc?: SourceLocation)
|
||||
}
|
||||
|
||||
class TokenType {
|
||||
label: string
|
||||
keyword: string
|
||||
beforeExpr: boolean
|
||||
startsExpr: boolean
|
||||
isLoop: boolean
|
||||
isAssign: boolean
|
||||
prefix: boolean
|
||||
postfix: boolean
|
||||
binop: number
|
||||
updateContext?: (prevType: TokenType) => void
|
||||
constructor(label: string, conf?: any)
|
||||
}
|
||||
|
||||
const tokTypes: {
|
||||
num: TokenType
|
||||
regexp: TokenType
|
||||
string: TokenType
|
||||
name: TokenType
|
||||
privateId: TokenType
|
||||
eof: TokenType
|
||||
bracketL: TokenType
|
||||
bracketR: TokenType
|
||||
braceL: TokenType
|
||||
braceR: TokenType
|
||||
parenL: TokenType
|
||||
parenR: TokenType
|
||||
comma: TokenType
|
||||
semi: TokenType
|
||||
colon: TokenType
|
||||
dot: TokenType
|
||||
question: TokenType
|
||||
arrow: TokenType
|
||||
template: TokenType
|
||||
ellipsis: TokenType
|
||||
backQuote: TokenType
|
||||
dollarBraceL: TokenType
|
||||
eq: TokenType
|
||||
assign: TokenType
|
||||
incDec: TokenType
|
||||
prefix: TokenType
|
||||
logicalOR: TokenType
|
||||
logicalAND: TokenType
|
||||
bitwiseOR: TokenType
|
||||
bitwiseXOR: TokenType
|
||||
bitwiseAND: TokenType
|
||||
equality: TokenType
|
||||
relational: TokenType
|
||||
bitShift: TokenType
|
||||
plusMin: TokenType
|
||||
modulo: TokenType
|
||||
star: TokenType
|
||||
slash: TokenType
|
||||
starstar: TokenType
|
||||
_break: TokenType
|
||||
_case: TokenType
|
||||
_catch: TokenType
|
||||
_continue: TokenType
|
||||
_debugger: TokenType
|
||||
_default: TokenType
|
||||
_do: TokenType
|
||||
_else: TokenType
|
||||
_finally: TokenType
|
||||
_for: TokenType
|
||||
_function: TokenType
|
||||
_if: TokenType
|
||||
_return: TokenType
|
||||
_switch: TokenType
|
||||
_throw: TokenType
|
||||
_try: TokenType
|
||||
_var: TokenType
|
||||
_const: TokenType
|
||||
_while: TokenType
|
||||
_with: TokenType
|
||||
_new: TokenType
|
||||
_this: TokenType
|
||||
_super: TokenType
|
||||
_class: TokenType
|
||||
_extends: TokenType
|
||||
_export: TokenType
|
||||
_import: TokenType
|
||||
_null: TokenType
|
||||
_true: TokenType
|
||||
_false: TokenType
|
||||
_in: TokenType
|
||||
_instanceof: TokenType
|
||||
_typeof: TokenType
|
||||
_void: TokenType
|
||||
_delete: TokenType
|
||||
}
|
||||
|
||||
class TokContext {
|
||||
constructor(token: string, isExpr: boolean, preserveSpace: boolean, override?: (p: Parser) => void)
|
||||
}
|
||||
|
||||
const tokContexts: {
|
||||
b_stat: TokContext
|
||||
b_expr: TokContext
|
||||
b_tmpl: TokContext
|
||||
p_stat: TokContext
|
||||
p_expr: TokContext
|
||||
q_tmpl: TokContext
|
||||
f_expr: TokContext
|
||||
f_stat: TokContext
|
||||
f_expr_gen: TokContext
|
||||
f_gen: TokContext
|
||||
}
|
||||
|
||||
function isIdentifierStart(code: number, astral?: boolean): boolean
|
||||
|
||||
function isIdentifierChar(code: number, astral?: boolean): boolean
|
||||
|
||||
interface AbstractToken {
|
||||
}
|
||||
|
||||
interface Comment extends AbstractToken {
|
||||
type: string
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
class Token {
|
||||
type: TokenType
|
||||
value: any
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
constructor(p: Parser)
|
||||
}
|
||||
|
||||
function isNewLine(code: number): boolean
|
||||
|
||||
const lineBreak: RegExp
|
||||
|
||||
const lineBreakG: RegExp
|
||||
|
||||
const version: string
|
||||
}
|
||||
5572
node_modules/ava/node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
5572
node_modules/ava/node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
5541
node_modules/ava/node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
5541
node_modules/ava/node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
2
node_modules/ava/node_modules/acorn/dist/acorn.mjs.d.ts
generated
vendored
Normal file
2
node_modules/ava/node_modules/acorn/dist/acorn.mjs.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
import * as acorn from "./acorn";
|
||||
export = acorn;
|
||||
71
node_modules/ava/node_modules/acorn/dist/bin.js
generated
vendored
Normal file
71
node_modules/ava/node_modules/acorn/dist/bin.js
generated
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var acorn = require('./acorn.js');
|
||||
|
||||
var inputFilePaths = [], forceFileName = false, fileMode = false, silent = false, compact = false, tokenize = false;
|
||||
var options = {};
|
||||
|
||||
function help(status) {
|
||||
var print = (status === 0) ? console.log : console.error;
|
||||
print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
|
||||
print(" [--tokenize] [--locations] [---allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
|
||||
process.exit(status);
|
||||
}
|
||||
|
||||
for (var i = 2; i < process.argv.length; ++i) {
|
||||
var arg = process.argv[i];
|
||||
if (arg[0] !== "-" || arg === "-") { inputFilePaths.push(arg); }
|
||||
else if (arg === "--") {
|
||||
inputFilePaths.push.apply(inputFilePaths, process.argv.slice(i + 1));
|
||||
forceFileName = true;
|
||||
break
|
||||
} else if (arg === "--locations") { options.locations = true; }
|
||||
else if (arg === "--allow-hash-bang") { options.allowHashBang = true; }
|
||||
else if (arg === "--allow-await-outside-function") { options.allowAwaitOutsideFunction = true; }
|
||||
else if (arg === "--silent") { silent = true; }
|
||||
else if (arg === "--compact") { compact = true; }
|
||||
else if (arg === "--help") { help(0); }
|
||||
else if (arg === "--tokenize") { tokenize = true; }
|
||||
else if (arg === "--module") { options.sourceType = "module"; }
|
||||
else {
|
||||
var match = arg.match(/^--ecma(\d+)$/);
|
||||
if (match)
|
||||
{ options.ecmaVersion = +match[1]; }
|
||||
else
|
||||
{ help(1); }
|
||||
}
|
||||
}
|
||||
|
||||
function run(codeList) {
|
||||
var result = [], fileIdx = 0;
|
||||
try {
|
||||
codeList.forEach(function (code, idx) {
|
||||
fileIdx = idx;
|
||||
if (!tokenize) {
|
||||
result = acorn.parse(code, options);
|
||||
options.program = result;
|
||||
} else {
|
||||
var tokenizer = acorn.tokenizer(code, options), token;
|
||||
do {
|
||||
token = tokenizer.getToken();
|
||||
result.push(token);
|
||||
} while (token.type !== acorn.tokTypes.eof)
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(fileMode ? e.message.replace(/\(\d+:\d+\)$/, function (m) { return m.slice(0, 1) + inputFilePaths[fileIdx] + " " + m.slice(1); }) : e.message);
|
||||
process.exit(1);
|
||||
}
|
||||
if (!silent) { console.log(JSON.stringify(result, null, compact ? null : 2)); }
|
||||
}
|
||||
|
||||
if (fileMode = inputFilePaths.length && (forceFileName || !inputFilePaths.includes("-") || inputFilePaths.length !== 1)) {
|
||||
run(inputFilePaths.map(function (path) { return fs.readFileSync(path, "utf8"); }));
|
||||
} else {
|
||||
var code = "";
|
||||
process.stdin.resume();
|
||||
process.stdin.on("data", function (chunk) { return code += chunk; });
|
||||
process.stdin.on("end", function () { return run([code]); });
|
||||
}
|
||||
|
|
@ -1,11 +1,22 @@
|
|||
{
|
||||
"name": "acorn-walk",
|
||||
"description": "ECMAScript (ESTree) AST walker",
|
||||
"name": "acorn",
|
||||
"description": "ECMAScript parser",
|
||||
"homepage": "https://github.com/acornjs/acorn",
|
||||
"main": "dist/walk.js",
|
||||
"types": "dist/walk.d.ts",
|
||||
"module": "dist/walk.mjs",
|
||||
"version": "7.1.1",
|
||||
"main": "dist/acorn.js",
|
||||
"types": "dist/acorn.d.ts",
|
||||
"module": "dist/acorn.mjs",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": "./dist/acorn.mjs",
|
||||
"require": "./dist/acorn.js",
|
||||
"default": "./dist/acorn.js"
|
||||
},
|
||||
"./dist/acorn.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"version": "8.5.0",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
|
|
@ -29,8 +40,11 @@
|
|||
"type": "git",
|
||||
"url": "https://github.com/acornjs/acorn.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"prepare": "cd ..; npm run build:walk"
|
||||
"prepare": "cd ..; npm run build:main && npm run build:bin"
|
||||
},
|
||||
"license": "MIT"
|
||||
"bin": {
|
||||
"acorn": "./bin/acorn"
|
||||
}
|
||||
}
|
||||
104
node_modules/ava/node_modules/ansi-styles/index.d.ts
generated
vendored
104
node_modules/ava/node_modules/ansi-styles/index.d.ts
generated
vendored
|
|
@ -1,66 +1,4 @@
|
|||
import * as cssColors from 'color-name';
|
||||
|
||||
declare namespace ansiStyles {
|
||||
interface ColorConvert {
|
||||
/**
|
||||
The RGB color space.
|
||||
|
||||
@param red - (`0`-`255`)
|
||||
@param green - (`0`-`255`)
|
||||
@param blue - (`0`-`255`)
|
||||
*/
|
||||
rgb(red: number, green: number, blue: number): string;
|
||||
|
||||
/**
|
||||
The RGB HEX color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hex(hex: string): string;
|
||||
|
||||
/**
|
||||
@param keyword - A CSS color name.
|
||||
*/
|
||||
keyword(keyword: keyof typeof cssColors): string;
|
||||
|
||||
/**
|
||||
The HSL color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param lightness - (`0`-`100`)
|
||||
*/
|
||||
hsl(hue: number, saturation: number, lightness: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param value - (`0`-`100`)
|
||||
*/
|
||||
hsv(hue: number, saturation: number, value: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param whiteness - (`0`-`100`)
|
||||
@param blackness - (`0`-`100`)
|
||||
*/
|
||||
hwb(hue: number, whiteness: number, blackness: number): string;
|
||||
|
||||
/**
|
||||
Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color.
|
||||
*/
|
||||
ansi(ansi: number): string;
|
||||
|
||||
/**
|
||||
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
|
||||
*/
|
||||
ansi256(ansi: number): string;
|
||||
}
|
||||
|
||||
interface CSPair {
|
||||
/**
|
||||
The ANSI terminal control sequence for starting this style.
|
||||
|
|
@ -74,14 +12,14 @@ declare namespace ansiStyles {
|
|||
}
|
||||
|
||||
interface ColorBase {
|
||||
readonly ansi: ColorConvert;
|
||||
readonly ansi256: ColorConvert;
|
||||
readonly ansi16m: ColorConvert;
|
||||
|
||||
/**
|
||||
The ANSI terminal control sequence for ending this color.
|
||||
*/
|
||||
readonly close: string;
|
||||
|
||||
ansi256(code: number): string;
|
||||
|
||||
ansi16m(red: number, green: number, blue: number): string;
|
||||
}
|
||||
|
||||
interface Modifier {
|
||||
|
|
@ -110,6 +48,13 @@ declare namespace ansiStyles {
|
|||
*/
|
||||
readonly underline: CSPair;
|
||||
|
||||
/**
|
||||
Make text overline.
|
||||
|
||||
Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
|
||||
*/
|
||||
readonly overline: CSPair;
|
||||
|
||||
/**
|
||||
Inverse background and foreground colors.
|
||||
*/
|
||||
|
|
@ -185,6 +130,31 @@ declare namespace ansiStyles {
|
|||
readonly bgMagentaBright: CSPair;
|
||||
readonly bgWhiteBright: CSPair;
|
||||
}
|
||||
|
||||
interface ConvertColor {
|
||||
/**
|
||||
Convert from the RGB color space to the ANSI 256 color space.
|
||||
|
||||
@param red - (`0...255`)
|
||||
@param green - (`0...255`)
|
||||
@param blue - (`0...255`)
|
||||
*/
|
||||
rgbToAnsi256(red: number, green: number, blue: number): number;
|
||||
|
||||
/**
|
||||
Convert from the RGB HEX color space to the RGB color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hexToRgb(hex: string): [red: number, green: number, blue: number];
|
||||
|
||||
/**
|
||||
Convert from the RGB HEX color space to the ANSI 256 color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hexToAnsi256(hex: string): number;
|
||||
}
|
||||
}
|
||||
|
||||
declare const ansiStyles: {
|
||||
|
|
@ -192,6 +162,6 @@ declare const ansiStyles: {
|
|||
readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase;
|
||||
readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase;
|
||||
readonly codes: ReadonlyMap<number, number>;
|
||||
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier;
|
||||
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier & ansiStyles.ConvertColor;
|
||||
|
||||
export = ansiStyles;
|
||||
|
|
|
|||
123
node_modules/ava/node_modules/ansi-styles/index.js
generated
vendored
123
node_modules/ava/node_modules/ansi-styles/index.js
generated
vendored
|
|
@ -1,62 +1,10 @@
|
|||
'use strict';
|
||||
|
||||
const wrapAnsi16 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${code + offset}m`;
|
||||
};
|
||||
const ANSI_BACKGROUND_OFFSET = 10;
|
||||
|
||||
const wrapAnsi256 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${38 + offset};5;${code}m`;
|
||||
};
|
||||
const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`;
|
||||
|
||||
const wrapAnsi16m = (fn, offset) => (...args) => {
|
||||
const rgb = fn(...args);
|
||||
return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`;
|
||||
};
|
||||
|
||||
const ansi2ansi = n => n;
|
||||
const rgb2rgb = (r, g, b) => [r, g, b];
|
||||
|
||||
const setLazyProperty = (object, property, get) => {
|
||||
Object.defineProperty(object, property, {
|
||||
get: () => {
|
||||
const value = get();
|
||||
|
||||
Object.defineProperty(object, property, {
|
||||
value,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
return value;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
};
|
||||
|
||||
/** @type {typeof import('color-convert')} */
|
||||
let colorConvert;
|
||||
const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => {
|
||||
if (colorConvert === undefined) {
|
||||
colorConvert = require('color-convert');
|
||||
}
|
||||
|
||||
const offset = isBackground ? 10 : 0;
|
||||
const styles = {};
|
||||
|
||||
for (const [sourceSpace, suite] of Object.entries(colorConvert)) {
|
||||
const name = sourceSpace === 'ansi16' ? 'ansi' : sourceSpace;
|
||||
if (sourceSpace === targetSpace) {
|
||||
styles[name] = wrap(identity, offset);
|
||||
} else if (typeof suite === 'object') {
|
||||
styles[name] = wrap(suite[targetSpace], offset);
|
||||
}
|
||||
}
|
||||
|
||||
return styles;
|
||||
};
|
||||
const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`;
|
||||
|
||||
function assembleStyles() {
|
||||
const codes = new Map();
|
||||
|
|
@ -68,6 +16,7 @@ function assembleStyles() {
|
|||
dim: [2, 22],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
overline: [53, 55],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29]
|
||||
|
|
@ -146,12 +95,64 @@ function assembleStyles() {
|
|||
styles.color.close = '\u001B[39m';
|
||||
styles.bgColor.close = '\u001B[49m';
|
||||
|
||||
setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false));
|
||||
setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true));
|
||||
styles.color.ansi256 = wrapAnsi256();
|
||||
styles.color.ansi16m = wrapAnsi16m();
|
||||
styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
|
||||
styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
|
||||
|
||||
// From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
|
||||
Object.defineProperties(styles, {
|
||||
rgbToAnsi256: {
|
||||
value: (red, green, blue) => {
|
||||
// We use the extended greyscale palette here, with the exception of
|
||||
// black and white. normal palette only has 4 greyscale shades.
|
||||
if (red === green && green === blue) {
|
||||
if (red < 8) {
|
||||
return 16;
|
||||
}
|
||||
|
||||
if (red > 248) {
|
||||
return 231;
|
||||
}
|
||||
|
||||
return Math.round(((red - 8) / 247) * 24) + 232;
|
||||
}
|
||||
|
||||
return 16 +
|
||||
(36 * Math.round(red / 255 * 5)) +
|
||||
(6 * Math.round(green / 255 * 5)) +
|
||||
Math.round(blue / 255 * 5);
|
||||
},
|
||||
enumerable: false
|
||||
},
|
||||
hexToRgb: {
|
||||
value: hex => {
|
||||
const matches = /(?<colorString>[a-f\d]{6}|[a-f\d]{3})/i.exec(hex.toString(16));
|
||||
if (!matches) {
|
||||
return [0, 0, 0];
|
||||
}
|
||||
|
||||
let {colorString} = matches.groups;
|
||||
|
||||
if (colorString.length === 3) {
|
||||
colorString = colorString.split('').map(character => character + character).join('');
|
||||
}
|
||||
|
||||
const integer = Number.parseInt(colorString, 16);
|
||||
|
||||
return [
|
||||
(integer >> 16) & 0xFF,
|
||||
(integer >> 8) & 0xFF,
|
||||
integer & 0xFF
|
||||
];
|
||||
},
|
||||
enumerable: false
|
||||
},
|
||||
hexToAnsi256: {
|
||||
value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
|
||||
enumerable: false
|
||||
}
|
||||
});
|
||||
|
||||
return styles;
|
||||
}
|
||||
|
|
|
|||
17
node_modules/ava/node_modules/ansi-styles/package.json
generated
vendored
17
node_modules/ava/node_modules/ansi-styles/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "ansi-styles",
|
||||
"version": "4.2.1",
|
||||
"version": "5.2.0",
|
||||
"description": "ANSI escape codes for styling strings in the terminal",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/ansi-styles",
|
||||
|
|
@ -8,10 +8,10 @@
|
|||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
"node": ">=10"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd",
|
||||
|
|
@ -43,15 +43,10 @@
|
|||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"dependencies": {
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/color-convert": "^1.9.0",
|
||||
"ava": "^2.3.0",
|
||||
"ava": "^2.4.0",
|
||||
"svg-term-cli": "^2.1.1",
|
||||
"tsd": "^0.11.0",
|
||||
"xo": "^0.25.3"
|
||||
"tsd": "^0.14.0",
|
||||
"xo": "^0.37.1"
|
||||
}
|
||||
}
|
||||
46
node_modules/ava/node_modules/ansi-styles/readme.md
generated
vendored
46
node_modules/ava/node_modules/ansi-styles/readme.md
generated
vendored
|
|
@ -1,4 +1,4 @@
|
|||
# ansi-styles [](https://travis-ci.org/chalk/ansi-styles)
|
||||
# ansi-styles
|
||||
|
||||
> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
|
||||
|
||||
|
|
@ -20,14 +20,13 @@ const style = require('ansi-styles');
|
|||
console.log(`${style.green.open}Hello world!${style.green.close}`);
|
||||
|
||||
|
||||
// Color conversion between 16/256/truecolor
|
||||
// NOTE: If conversion goes to 16 colors or 256 colors, the original color
|
||||
// may be degraded to fit that color palette. This means terminals
|
||||
// Color conversion between 256/truecolor
|
||||
// NOTE: When converting from truecolor to 256 colors, the original color
|
||||
// may be degraded to fit the new color palette. This means terminals
|
||||
// that do not support 16 million colors will best-match the
|
||||
// original color.
|
||||
console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close);
|
||||
console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close);
|
||||
console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close);
|
||||
console.log(`${style.color.ansi256(style.rgbToAnsi256(199, 20, 250))}Hello World${style.color.close}`)
|
||||
console.log(`${style.color.ansi16m(...style.hexToRgb('#abcdef'))}Hello World${style.color.close}`)
|
||||
```
|
||||
|
||||
## API
|
||||
|
|
@ -43,6 +42,7 @@ Each style has an `open` and `close` property.
|
|||
- `dim`
|
||||
- `italic` *(Not widely supported)*
|
||||
- `underline`
|
||||
- `overline` *Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.*
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(Not widely supported)*
|
||||
|
|
@ -110,30 +110,22 @@ console.log(style.codes.get(36));
|
|||
|
||||
## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728)
|
||||
|
||||
`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors.
|
||||
`ansi-styles` allows converting between various color formats and ANSI escapes, with support for 256 and 16 million colors.
|
||||
|
||||
The following color spaces from `color-convert` are supported:
|
||||
|
||||
- `rgb`
|
||||
- `hex`
|
||||
- `keyword`
|
||||
- `hsl`
|
||||
- `hsv`
|
||||
- `hwb`
|
||||
- `ansi`
|
||||
- `ansi256`
|
||||
|
||||
To use these, call the associated conversion function with the intended output, for example:
|
||||
|
||||
```js
|
||||
style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code
|
||||
style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code
|
||||
style.color.ansi256(style.rgbToAnsi256(100, 200, 15)); // RGB to 256 color ansi foreground code
|
||||
style.bgColor.ansi256(style.hexToAnsi256('#C0FFEE')); // HEX to 256 color ansi foreground code
|
||||
|
||||
style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code
|
||||
style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code
|
||||
|
||||
style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code
|
||||
style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code
|
||||
style.color.ansi16m(100, 200, 15); // RGB to 16 million color foreground code
|
||||
style.bgColor.ansi16m(...style.hexToRgb('#C0FFEE')); // Hex (RGB) to 16 million color foreground code
|
||||
```
|
||||
|
||||
## Related
|
||||
|
|
@ -145,14 +137,8 @@ style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color backgroun
|
|||
- [Sindre Sorhus](https://github.com/sindresorhus)
|
||||
- [Josh Junon](https://github.com/qix-)
|
||||
|
||||
---
|
||||
## For enterprise
|
||||
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
Available as part of the Tidelift Subscription.
|
||||
|
||||
The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
|
||||
|
|
|
|||
7
node_modules/ava/node_modules/chalk/index.d.ts
generated
vendored
7
node_modules/ava/node_modules/chalk/index.d.ts
generated
vendored
|
|
@ -137,6 +137,13 @@ declare namespace chalk {
|
|||
DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%}
|
||||
`);
|
||||
```
|
||||
|
||||
@example
|
||||
```
|
||||
import chalk = require('chalk');
|
||||
|
||||
log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`)
|
||||
```
|
||||
*/
|
||||
(text: TemplateStringsArray, ...placeholders: unknown[]): string;
|
||||
|
||||
|
|
|
|||
345
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/index.d.ts
generated
vendored
Normal file
345
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,345 @@
|
|||
declare type CSSColor =
|
||||
| 'aliceblue'
|
||||
| 'antiquewhite'
|
||||
| 'aqua'
|
||||
| 'aquamarine'
|
||||
| 'azure'
|
||||
| 'beige'
|
||||
| 'bisque'
|
||||
| 'black'
|
||||
| 'blanchedalmond'
|
||||
| 'blue'
|
||||
| 'blueviolet'
|
||||
| 'brown'
|
||||
| 'burlywood'
|
||||
| 'cadetblue'
|
||||
| 'chartreuse'
|
||||
| 'chocolate'
|
||||
| 'coral'
|
||||
| 'cornflowerblue'
|
||||
| 'cornsilk'
|
||||
| 'crimson'
|
||||
| 'cyan'
|
||||
| 'darkblue'
|
||||
| 'darkcyan'
|
||||
| 'darkgoldenrod'
|
||||
| 'darkgray'
|
||||
| 'darkgreen'
|
||||
| 'darkgrey'
|
||||
| 'darkkhaki'
|
||||
| 'darkmagenta'
|
||||
| 'darkolivegreen'
|
||||
| 'darkorange'
|
||||
| 'darkorchid'
|
||||
| 'darkred'
|
||||
| 'darksalmon'
|
||||
| 'darkseagreen'
|
||||
| 'darkslateblue'
|
||||
| 'darkslategray'
|
||||
| 'darkslategrey'
|
||||
| 'darkturquoise'
|
||||
| 'darkviolet'
|
||||
| 'deeppink'
|
||||
| 'deepskyblue'
|
||||
| 'dimgray'
|
||||
| 'dimgrey'
|
||||
| 'dodgerblue'
|
||||
| 'firebrick'
|
||||
| 'floralwhite'
|
||||
| 'forestgreen'
|
||||
| 'fuchsia'
|
||||
| 'gainsboro'
|
||||
| 'ghostwhite'
|
||||
| 'gold'
|
||||
| 'goldenrod'
|
||||
| 'gray'
|
||||
| 'green'
|
||||
| 'greenyellow'
|
||||
| 'grey'
|
||||
| 'honeydew'
|
||||
| 'hotpink'
|
||||
| 'indianred'
|
||||
| 'indigo'
|
||||
| 'ivory'
|
||||
| 'khaki'
|
||||
| 'lavender'
|
||||
| 'lavenderblush'
|
||||
| 'lawngreen'
|
||||
| 'lemonchiffon'
|
||||
| 'lightblue'
|
||||
| 'lightcoral'
|
||||
| 'lightcyan'
|
||||
| 'lightgoldenrodyellow'
|
||||
| 'lightgray'
|
||||
| 'lightgreen'
|
||||
| 'lightgrey'
|
||||
| 'lightpink'
|
||||
| 'lightsalmon'
|
||||
| 'lightseagreen'
|
||||
| 'lightskyblue'
|
||||
| 'lightslategray'
|
||||
| 'lightslategrey'
|
||||
| 'lightsteelblue'
|
||||
| 'lightyellow'
|
||||
| 'lime'
|
||||
| 'limegreen'
|
||||
| 'linen'
|
||||
| 'magenta'
|
||||
| 'maroon'
|
||||
| 'mediumaquamarine'
|
||||
| 'mediumblue'
|
||||
| 'mediumorchid'
|
||||
| 'mediumpurple'
|
||||
| 'mediumseagreen'
|
||||
| 'mediumslateblue'
|
||||
| 'mediumspringgreen'
|
||||
| 'mediumturquoise'
|
||||
| 'mediumvioletred'
|
||||
| 'midnightblue'
|
||||
| 'mintcream'
|
||||
| 'mistyrose'
|
||||
| 'moccasin'
|
||||
| 'navajowhite'
|
||||
| 'navy'
|
||||
| 'oldlace'
|
||||
| 'olive'
|
||||
| 'olivedrab'
|
||||
| 'orange'
|
||||
| 'orangered'
|
||||
| 'orchid'
|
||||
| 'palegoldenrod'
|
||||
| 'palegreen'
|
||||
| 'paleturquoise'
|
||||
| 'palevioletred'
|
||||
| 'papayawhip'
|
||||
| 'peachpuff'
|
||||
| 'peru'
|
||||
| 'pink'
|
||||
| 'plum'
|
||||
| 'powderblue'
|
||||
| 'purple'
|
||||
| 'rebeccapurple'
|
||||
| 'red'
|
||||
| 'rosybrown'
|
||||
| 'royalblue'
|
||||
| 'saddlebrown'
|
||||
| 'salmon'
|
||||
| 'sandybrown'
|
||||
| 'seagreen'
|
||||
| 'seashell'
|
||||
| 'sienna'
|
||||
| 'silver'
|
||||
| 'skyblue'
|
||||
| 'slateblue'
|
||||
| 'slategray'
|
||||
| 'slategrey'
|
||||
| 'snow'
|
||||
| 'springgreen'
|
||||
| 'steelblue'
|
||||
| 'tan'
|
||||
| 'teal'
|
||||
| 'thistle'
|
||||
| 'tomato'
|
||||
| 'turquoise'
|
||||
| 'violet'
|
||||
| 'wheat'
|
||||
| 'white'
|
||||
| 'whitesmoke'
|
||||
| 'yellow'
|
||||
| 'yellowgreen';
|
||||
|
||||
declare namespace ansiStyles {
|
||||
interface ColorConvert {
|
||||
/**
|
||||
The RGB color space.
|
||||
|
||||
@param red - (`0`-`255`)
|
||||
@param green - (`0`-`255`)
|
||||
@param blue - (`0`-`255`)
|
||||
*/
|
||||
rgb(red: number, green: number, blue: number): string;
|
||||
|
||||
/**
|
||||
The RGB HEX color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hex(hex: string): string;
|
||||
|
||||
/**
|
||||
@param keyword - A CSS color name.
|
||||
*/
|
||||
keyword(keyword: CSSColor): string;
|
||||
|
||||
/**
|
||||
The HSL color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param lightness - (`0`-`100`)
|
||||
*/
|
||||
hsl(hue: number, saturation: number, lightness: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param value - (`0`-`100`)
|
||||
*/
|
||||
hsv(hue: number, saturation: number, value: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param whiteness - (`0`-`100`)
|
||||
@param blackness - (`0`-`100`)
|
||||
*/
|
||||
hwb(hue: number, whiteness: number, blackness: number): string;
|
||||
|
||||
/**
|
||||
Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color.
|
||||
*/
|
||||
ansi(ansi: number): string;
|
||||
|
||||
/**
|
||||
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
|
||||
*/
|
||||
ansi256(ansi: number): string;
|
||||
}
|
||||
|
||||
interface CSPair {
|
||||
/**
|
||||
The ANSI terminal control sequence for starting this style.
|
||||
*/
|
||||
readonly open: string;
|
||||
|
||||
/**
|
||||
The ANSI terminal control sequence for ending this style.
|
||||
*/
|
||||
readonly close: string;
|
||||
}
|
||||
|
||||
interface ColorBase {
|
||||
readonly ansi: ColorConvert;
|
||||
readonly ansi256: ColorConvert;
|
||||
readonly ansi16m: ColorConvert;
|
||||
|
||||
/**
|
||||
The ANSI terminal control sequence for ending this color.
|
||||
*/
|
||||
readonly close: string;
|
||||
}
|
||||
|
||||
interface Modifier {
|
||||
/**
|
||||
Resets the current color chain.
|
||||
*/
|
||||
readonly reset: CSPair;
|
||||
|
||||
/**
|
||||
Make text bold.
|
||||
*/
|
||||
readonly bold: CSPair;
|
||||
|
||||
/**
|
||||
Emitting only a small amount of light.
|
||||
*/
|
||||
readonly dim: CSPair;
|
||||
|
||||
/**
|
||||
Make text italic. (Not widely supported)
|
||||
*/
|
||||
readonly italic: CSPair;
|
||||
|
||||
/**
|
||||
Make text underline. (Not widely supported)
|
||||
*/
|
||||
readonly underline: CSPair;
|
||||
|
||||
/**
|
||||
Inverse background and foreground colors.
|
||||
*/
|
||||
readonly inverse: CSPair;
|
||||
|
||||
/**
|
||||
Prints the text, but makes it invisible.
|
||||
*/
|
||||
readonly hidden: CSPair;
|
||||
|
||||
/**
|
||||
Puts a horizontal line through the center of the text. (Not widely supported)
|
||||
*/
|
||||
readonly strikethrough: CSPair;
|
||||
}
|
||||
|
||||
interface ForegroundColor {
|
||||
readonly black: CSPair;
|
||||
readonly red: CSPair;
|
||||
readonly green: CSPair;
|
||||
readonly yellow: CSPair;
|
||||
readonly blue: CSPair;
|
||||
readonly cyan: CSPair;
|
||||
readonly magenta: CSPair;
|
||||
readonly white: CSPair;
|
||||
|
||||
/**
|
||||
Alias for `blackBright`.
|
||||
*/
|
||||
readonly gray: CSPair;
|
||||
|
||||
/**
|
||||
Alias for `blackBright`.
|
||||
*/
|
||||
readonly grey: CSPair;
|
||||
|
||||
readonly blackBright: CSPair;
|
||||
readonly redBright: CSPair;
|
||||
readonly greenBright: CSPair;
|
||||
readonly yellowBright: CSPair;
|
||||
readonly blueBright: CSPair;
|
||||
readonly cyanBright: CSPair;
|
||||
readonly magentaBright: CSPair;
|
||||
readonly whiteBright: CSPair;
|
||||
}
|
||||
|
||||
interface BackgroundColor {
|
||||
readonly bgBlack: CSPair;
|
||||
readonly bgRed: CSPair;
|
||||
readonly bgGreen: CSPair;
|
||||
readonly bgYellow: CSPair;
|
||||
readonly bgBlue: CSPair;
|
||||
readonly bgCyan: CSPair;
|
||||
readonly bgMagenta: CSPair;
|
||||
readonly bgWhite: CSPair;
|
||||
|
||||
/**
|
||||
Alias for `bgBlackBright`.
|
||||
*/
|
||||
readonly bgGray: CSPair;
|
||||
|
||||
/**
|
||||
Alias for `bgBlackBright`.
|
||||
*/
|
||||
readonly bgGrey: CSPair;
|
||||
|
||||
readonly bgBlackBright: CSPair;
|
||||
readonly bgRedBright: CSPair;
|
||||
readonly bgGreenBright: CSPair;
|
||||
readonly bgYellowBright: CSPair;
|
||||
readonly bgBlueBright: CSPair;
|
||||
readonly bgCyanBright: CSPair;
|
||||
readonly bgMagentaBright: CSPair;
|
||||
readonly bgWhiteBright: CSPair;
|
||||
}
|
||||
}
|
||||
|
||||
declare const ansiStyles: {
|
||||
readonly modifier: ansiStyles.Modifier;
|
||||
readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase;
|
||||
readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase;
|
||||
readonly codes: ReadonlyMap<number, number>;
|
||||
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier;
|
||||
|
||||
export = ansiStyles;
|
||||
163
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/index.js
generated
vendored
Normal file
163
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
'use strict';
|
||||
|
||||
const wrapAnsi16 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${code + offset}m`;
|
||||
};
|
||||
|
||||
const wrapAnsi256 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${38 + offset};5;${code}m`;
|
||||
};
|
||||
|
||||
const wrapAnsi16m = (fn, offset) => (...args) => {
|
||||
const rgb = fn(...args);
|
||||
return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`;
|
||||
};
|
||||
|
||||
const ansi2ansi = n => n;
|
||||
const rgb2rgb = (r, g, b) => [r, g, b];
|
||||
|
||||
const setLazyProperty = (object, property, get) => {
|
||||
Object.defineProperty(object, property, {
|
||||
get: () => {
|
||||
const value = get();
|
||||
|
||||
Object.defineProperty(object, property, {
|
||||
value,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
return value;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
};
|
||||
|
||||
/** @type {typeof import('color-convert')} */
|
||||
let colorConvert;
|
||||
const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => {
|
||||
if (colorConvert === undefined) {
|
||||
colorConvert = require('color-convert');
|
||||
}
|
||||
|
||||
const offset = isBackground ? 10 : 0;
|
||||
const styles = {};
|
||||
|
||||
for (const [sourceSpace, suite] of Object.entries(colorConvert)) {
|
||||
const name = sourceSpace === 'ansi16' ? 'ansi' : sourceSpace;
|
||||
if (sourceSpace === targetSpace) {
|
||||
styles[name] = wrap(identity, offset);
|
||||
} else if (typeof suite === 'object') {
|
||||
styles[name] = wrap(suite[targetSpace], offset);
|
||||
}
|
||||
}
|
||||
|
||||
return styles;
|
||||
};
|
||||
|
||||
function assembleStyles() {
|
||||
const codes = new Map();
|
||||
const styles = {
|
||||
modifier: {
|
||||
reset: [0, 0],
|
||||
// 21 isn't widely supported and 22 does the same thing
|
||||
bold: [1, 22],
|
||||
dim: [2, 22],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29]
|
||||
},
|
||||
color: {
|
||||
black: [30, 39],
|
||||
red: [31, 39],
|
||||
green: [32, 39],
|
||||
yellow: [33, 39],
|
||||
blue: [34, 39],
|
||||
magenta: [35, 39],
|
||||
cyan: [36, 39],
|
||||
white: [37, 39],
|
||||
|
||||
// Bright color
|
||||
blackBright: [90, 39],
|
||||
redBright: [91, 39],
|
||||
greenBright: [92, 39],
|
||||
yellowBright: [93, 39],
|
||||
blueBright: [94, 39],
|
||||
magentaBright: [95, 39],
|
||||
cyanBright: [96, 39],
|
||||
whiteBright: [97, 39]
|
||||
},
|
||||
bgColor: {
|
||||
bgBlack: [40, 49],
|
||||
bgRed: [41, 49],
|
||||
bgGreen: [42, 49],
|
||||
bgYellow: [43, 49],
|
||||
bgBlue: [44, 49],
|
||||
bgMagenta: [45, 49],
|
||||
bgCyan: [46, 49],
|
||||
bgWhite: [47, 49],
|
||||
|
||||
// Bright color
|
||||
bgBlackBright: [100, 49],
|
||||
bgRedBright: [101, 49],
|
||||
bgGreenBright: [102, 49],
|
||||
bgYellowBright: [103, 49],
|
||||
bgBlueBright: [104, 49],
|
||||
bgMagentaBright: [105, 49],
|
||||
bgCyanBright: [106, 49],
|
||||
bgWhiteBright: [107, 49]
|
||||
}
|
||||
};
|
||||
|
||||
// Alias bright black as gray (and grey)
|
||||
styles.color.gray = styles.color.blackBright;
|
||||
styles.bgColor.bgGray = styles.bgColor.bgBlackBright;
|
||||
styles.color.grey = styles.color.blackBright;
|
||||
styles.bgColor.bgGrey = styles.bgColor.bgBlackBright;
|
||||
|
||||
for (const [groupName, group] of Object.entries(styles)) {
|
||||
for (const [styleName, style] of Object.entries(group)) {
|
||||
styles[styleName] = {
|
||||
open: `\u001B[${style[0]}m`,
|
||||
close: `\u001B[${style[1]}m`
|
||||
};
|
||||
|
||||
group[styleName] = styles[styleName];
|
||||
|
||||
codes.set(style[0], style[1]);
|
||||
}
|
||||
|
||||
Object.defineProperty(styles, groupName, {
|
||||
value: group,
|
||||
enumerable: false
|
||||
});
|
||||
}
|
||||
|
||||
Object.defineProperty(styles, 'codes', {
|
||||
value: codes,
|
||||
enumerable: false
|
||||
});
|
||||
|
||||
styles.color.close = '\u001B[39m';
|
||||
styles.bgColor.close = '\u001B[49m';
|
||||
|
||||
setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false));
|
||||
setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true));
|
||||
|
||||
return styles;
|
||||
}
|
||||
|
||||
// Make the export immutable
|
||||
Object.defineProperty(module, 'exports', {
|
||||
enumerable: true,
|
||||
get: assembleStyles
|
||||
});
|
||||
56
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/package.json
generated
vendored
Normal file
56
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"name": "ansi-styles",
|
||||
"version": "4.3.0",
|
||||
"description": "ANSI escape codes for styling strings in the terminal",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/ansi-styles",
|
||||
"funding": "https://github.com/chalk/ansi-styles?sponsor=1",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd",
|
||||
"screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/color-convert": "^1.9.0",
|
||||
"ava": "^2.3.0",
|
||||
"svg-term-cli": "^2.1.1",
|
||||
"tsd": "^0.11.0",
|
||||
"xo": "^0.25.3"
|
||||
}
|
||||
}
|
||||
152
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/readme.md
generated
vendored
Normal file
152
node_modules/ava/node_modules/chalk/node_modules/ansi-styles/readme.md
generated
vendored
Normal file
|
|
@ -0,0 +1,152 @@
|
|||
# ansi-styles [](https://travis-ci.org/chalk/ansi-styles)
|
||||
|
||||
> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
|
||||
|
||||
You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
|
||||
|
||||
<img src="screenshot.svg" width="900">
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install ansi-styles
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const style = require('ansi-styles');
|
||||
|
||||
console.log(`${style.green.open}Hello world!${style.green.close}`);
|
||||
|
||||
|
||||
// Color conversion between 16/256/truecolor
|
||||
// NOTE: If conversion goes to 16 colors or 256 colors, the original color
|
||||
// may be degraded to fit that color palette. This means terminals
|
||||
// that do not support 16 million colors will best-match the
|
||||
// original color.
|
||||
console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close);
|
||||
console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close);
|
||||
console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close);
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Each style has an `open` and `close` property.
|
||||
|
||||
## Styles
|
||||
|
||||
### Modifiers
|
||||
|
||||
- `reset`
|
||||
- `bold`
|
||||
- `dim`
|
||||
- `italic` *(Not widely supported)*
|
||||
- `underline`
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(Not widely supported)*
|
||||
|
||||
### Colors
|
||||
|
||||
- `black`
|
||||
- `red`
|
||||
- `green`
|
||||
- `yellow`
|
||||
- `blue`
|
||||
- `magenta`
|
||||
- `cyan`
|
||||
- `white`
|
||||
- `blackBright` (alias: `gray`, `grey`)
|
||||
- `redBright`
|
||||
- `greenBright`
|
||||
- `yellowBright`
|
||||
- `blueBright`
|
||||
- `magentaBright`
|
||||
- `cyanBright`
|
||||
- `whiteBright`
|
||||
|
||||
### Background colors
|
||||
|
||||
- `bgBlack`
|
||||
- `bgRed`
|
||||
- `bgGreen`
|
||||
- `bgYellow`
|
||||
- `bgBlue`
|
||||
- `bgMagenta`
|
||||
- `bgCyan`
|
||||
- `bgWhite`
|
||||
- `bgBlackBright` (alias: `bgGray`, `bgGrey`)
|
||||
- `bgRedBright`
|
||||
- `bgGreenBright`
|
||||
- `bgYellowBright`
|
||||
- `bgBlueBright`
|
||||
- `bgMagentaBright`
|
||||
- `bgCyanBright`
|
||||
- `bgWhiteBright`
|
||||
|
||||
## Advanced usage
|
||||
|
||||
By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
|
||||
|
||||
- `style.modifier`
|
||||
- `style.color`
|
||||
- `style.bgColor`
|
||||
|
||||
###### Example
|
||||
|
||||
```js
|
||||
console.log(style.color.green.open);
|
||||
```
|
||||
|
||||
Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values.
|
||||
|
||||
###### Example
|
||||
|
||||
```js
|
||||
console.log(style.codes.get(36));
|
||||
//=> 39
|
||||
```
|
||||
|
||||
## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728)
|
||||
|
||||
`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors.
|
||||
|
||||
The following color spaces from `color-convert` are supported:
|
||||
|
||||
- `rgb`
|
||||
- `hex`
|
||||
- `keyword`
|
||||
- `hsl`
|
||||
- `hsv`
|
||||
- `hwb`
|
||||
- `ansi`
|
||||
- `ansi256`
|
||||
|
||||
To use these, call the associated conversion function with the intended output, for example:
|
||||
|
||||
```js
|
||||
style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code
|
||||
style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code
|
||||
|
||||
style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code
|
||||
style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code
|
||||
|
||||
style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code
|
||||
style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code
|
||||
```
|
||||
|
||||
## Related
|
||||
|
||||
- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal
|
||||
|
||||
## Maintainers
|
||||
|
||||
- [Sindre Sorhus](https://github.com/sindresorhus)
|
||||
- [Josh Junon](https://github.com/qix-)
|
||||
|
||||
## For enterprise
|
||||
|
||||
Available as part of the Tidelift Subscription.
|
||||
|
||||
The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
|
||||
2
node_modules/ava/node_modules/chalk/package.json
generated
vendored
2
node_modules/ava/node_modules/chalk/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "chalk",
|
||||
"version": "4.0.0",
|
||||
"version": "4.1.2",
|
||||
"description": "Terminal string styling done right",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/chalk",
|
||||
|
|
|
|||
55
node_modules/ava/node_modules/chalk/readme.md
generated
vendored
55
node_modules/ava/node_modules/chalk/readme.md
generated
vendored
|
|
@ -9,10 +9,58 @@
|
|||
|
||||
> Terminal string styling done right
|
||||
|
||||
[](https://travis-ci.org/chalk/chalk) [](https://coveralls.io/github/chalk/chalk?branch=master) [](https://www.npmjs.com/package/chalk?activeTab=dependents) [](https://www.npmjs.com/package/chalk) [](https://www.youtube.com/watch?v=9auOCbH5Ns4) [](https://github.com/xojs/xo)  [](https://repl.it/github/chalk/chalk)
|
||||
[](https://travis-ci.org/chalk/chalk) [](https://coveralls.io/github/chalk/chalk?branch=master) [](https://www.npmjs.com/package/chalk?activeTab=dependents) [](https://www.npmjs.com/package/chalk) [](https://www.youtube.com/watch?v=9auOCbH5Ns4) [](https://github.com/xojs/xo)  [](https://repl.it/github/chalk/chalk)
|
||||
|
||||
<img src="https://cdn.jsdelivr.net/gh/chalk/ansi-styles@8261697c95bf34b6c7767e2cbe9941a851d59385/screenshot.svg" width="900">
|
||||
|
||||
<br>
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<p>
|
||||
<p>
|
||||
<sup>
|
||||
Sindre Sorhus' open source work is supported by the community on <a href="https://github.com/sponsors/sindresorhus">GitHub Sponsors</a> and <a href="https://stakes.social/0x44d871aebF0126Bf646753E2C976Aa7e68A66c15">Dev</a>
|
||||
</sup>
|
||||
</p>
|
||||
<sup>Special thanks to:</sup>
|
||||
<br>
|
||||
<br>
|
||||
<a href="https://standardresume.co/tech">
|
||||
<img src="https://sindresorhus.com/assets/thanks/standard-resume-logo.svg" width="160"/>
|
||||
</a>
|
||||
<br>
|
||||
<br>
|
||||
<a href="https://retool.com/?utm_campaign=sindresorhus">
|
||||
<img src="https://sindresorhus.com/assets/thanks/retool-logo.svg" width="230"/>
|
||||
</a>
|
||||
<br>
|
||||
<br>
|
||||
<a href="https://doppler.com/?utm_campaign=github_repo&utm_medium=referral&utm_content=chalk&utm_source=github">
|
||||
<div>
|
||||
<img src="https://dashboard.doppler.com/imgs/logo-long.svg" width="240" alt="Doppler">
|
||||
</div>
|
||||
<b>All your environment variables, in one place</b>
|
||||
<div>
|
||||
<span>Stop struggling with scattered API keys, hacking together home-brewed tools,</span>
|
||||
<br>
|
||||
<span>and avoiding access controls. Keep your team and servers in sync with Doppler.</span>
|
||||
</div>
|
||||
</a>
|
||||
<br>
|
||||
<a href="https://uibakery.io/?utm_source=chalk&utm_medium=sponsor&utm_campaign=github">
|
||||
<div>
|
||||
<img src="https://sindresorhus.com/assets/thanks/uibakery-logo.jpg" width="270" alt="UI Bakery">
|
||||
</div>
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
<br>
|
||||
|
||||
## Highlights
|
||||
|
||||
- Expressive API
|
||||
|
|
@ -199,7 +247,7 @@ Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=
|
|||
|
||||
## Tagged template literal
|
||||
|
||||
Chalk can be used as a [tagged template literal](http://exploringjs.com/es6/ch_template-literals.html#_tagged-template-literals).
|
||||
Chalk can be used as a [tagged template literal](https://exploringjs.com/es6/ch_template-literals.html#_tagged-template-literals).
|
||||
|
||||
```js
|
||||
const chalk = require('chalk');
|
||||
|
|
@ -215,10 +263,11 @@ console.log(chalk`
|
|||
|
||||
Blocks are delimited by an opening curly brace (`{`), a style, some content, and a closing curly brace (`}`).
|
||||
|
||||
Template styles are chained exactly like normal Chalk styles. The following two statements are equivalent:
|
||||
Template styles are chained exactly like normal Chalk styles. The following three statements are equivalent:
|
||||
|
||||
```js
|
||||
console.log(chalk.bold.rgb(10, 100, 200)('Hello!'));
|
||||
console.log(chalk.bold.rgb(10, 100, 200)`Hello!`);
|
||||
console.log(chalk`{bold.rgb(10,100,200) Hello!}`);
|
||||
```
|
||||
|
||||
|
|
|
|||
9
node_modules/ava/node_modules/chalk/source/index.js
generated
vendored
9
node_modules/ava/node_modules/chalk/source/index.js
generated
vendored
|
|
@ -6,6 +6,8 @@ const {
|
|||
stringEncaseCRLFWithFirstIndex
|
||||
} = require('./util');
|
||||
|
||||
const {isArray} = Array;
|
||||
|
||||
// `supportsColor.level` → `ansiStyles.color[name]` mapping
|
||||
const levelMapping = [
|
||||
'ansi',
|
||||
|
|
@ -135,6 +137,11 @@ const createStyler = (open, close, parent) => {
|
|||
|
||||
const createBuilder = (self, _styler, _isEmpty) => {
|
||||
const builder = (...arguments_) => {
|
||||
if (isArray(arguments_[0]) && isArray(arguments_[0].raw)) {
|
||||
// Called as a template literal, for example: chalk.red`2 + 3 = {bold ${2+3}}`
|
||||
return applyStyle(builder, chalkTag(builder, ...arguments_));
|
||||
}
|
||||
|
||||
// Single argument is hot path, implicit coercion is faster than anything
|
||||
// eslint-disable-next-line no-implicit-coercion
|
||||
return applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' '));
|
||||
|
|
@ -189,7 +196,7 @@ let template;
|
|||
const chalkTag = (chalk, ...strings) => {
|
||||
const [firstString] = strings;
|
||||
|
||||
if (!Array.isArray(firstString)) {
|
||||
if (!isArray(firstString) || !isArray(firstString.raw)) {
|
||||
// If chalk() was called by itself or with a string,
|
||||
// return the string itself as a string.
|
||||
return strings.join(' ');
|
||||
|
|
|
|||
18
node_modules/ava/node_modules/escape-string-regexp/index.d.ts
generated
vendored
18
node_modules/ava/node_modules/escape-string-regexp/index.d.ts
generated
vendored
|
|
@ -1,18 +0,0 @@
|
|||
/**
|
||||
Escape RegExp special characters.
|
||||
|
||||
You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.
|
||||
|
||||
@example
|
||||
```
|
||||
import escapeStringRegexp = require('escape-string-regexp');
|
||||
|
||||
const escapedString = escapeStringRegexp('How much $ for a 🦄?');
|
||||
//=> 'How much \\$ for a 🦄\\?'
|
||||
|
||||
new RegExp(escapedString);
|
||||
```
|
||||
*/
|
||||
declare const escapeStringRegexp: (string: string) => string;
|
||||
|
||||
export = escapeStringRegexp;
|
||||
11
node_modules/ava/node_modules/escape-string-regexp/index.js
generated
vendored
11
node_modules/ava/node_modules/escape-string-regexp/index.js
generated
vendored
|
|
@ -1,11 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const matchOperatorsRegex = /[|\\{}()[\]^$+*?.-]/g;
|
||||
|
||||
module.exports = string => {
|
||||
if (typeof string !== 'string') {
|
||||
throw new TypeError('Expected a string');
|
||||
}
|
||||
|
||||
return string.replace(matchOperatorsRegex, '\\$&');
|
||||
};
|
||||
43
node_modules/ava/node_modules/escape-string-regexp/package.json
generated
vendored
43
node_modules/ava/node_modules/escape-string-regexp/package.json
generated
vendored
|
|
@ -1,43 +0,0 @@
|
|||
{
|
||||
"name": "escape-string-regexp",
|
||||
"version": "2.0.0",
|
||||
"description": "Escape RegExp special characters",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/escape-string-regexp",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"maintainers": [
|
||||
"Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
|
||||
"Joshua Boy Nicolai Appelman <joshua@jbna.nl> (jbna.nl)"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"escape",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"regular",
|
||||
"expression",
|
||||
"string",
|
||||
"str",
|
||||
"special",
|
||||
"characters"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^1.4.1",
|
||||
"tsd": "^0.7.2",
|
||||
"xo": "^0.24.0"
|
||||
}
|
||||
}
|
||||
29
node_modules/ava/node_modules/escape-string-regexp/readme.md
generated
vendored
29
node_modules/ava/node_modules/escape-string-regexp/readme.md
generated
vendored
|
|
@ -1,29 +0,0 @@
|
|||
# escape-string-regexp [](https://travis-ci.org/sindresorhus/escape-string-regexp)
|
||||
|
||||
> Escape RegExp special characters
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install escape-string-regexp
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const escapeStringRegexp = require('escape-string-regexp');
|
||||
|
||||
const escapedString = escapeStringRegexp('How much $ for a 🦄?');
|
||||
//=> 'How much \\$ for a 🦄\\?'
|
||||
|
||||
new RegExp(escapedString);
|
||||
```
|
||||
|
||||
You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
162
node_modules/ava/node_modules/ms/index.js
generated
vendored
Normal file
162
node_modules/ava/node_modules/ms/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,162 @@
|
|||
/**
|
||||
* Helpers.
|
||||
*/
|
||||
|
||||
var s = 1000;
|
||||
var m = s * 60;
|
||||
var h = m * 60;
|
||||
var d = h * 24;
|
||||
var w = d * 7;
|
||||
var y = d * 365.25;
|
||||
|
||||
/**
|
||||
* Parse or format the given `val`.
|
||||
*
|
||||
* Options:
|
||||
*
|
||||
* - `long` verbose formatting [false]
|
||||
*
|
||||
* @param {String|Number} val
|
||||
* @param {Object} [options]
|
||||
* @throws {Error} throw an error if val is not a non-empty string or a number
|
||||
* @return {String|Number}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
module.exports = function (val, options) {
|
||||
options = options || {};
|
||||
var type = typeof val;
|
||||
if (type === 'string' && val.length > 0) {
|
||||
return parse(val);
|
||||
} else if (type === 'number' && isFinite(val)) {
|
||||
return options.long ? fmtLong(val) : fmtShort(val);
|
||||
}
|
||||
throw new Error(
|
||||
'val is not a non-empty string or a valid number. val=' +
|
||||
JSON.stringify(val)
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the given `str` and return milliseconds.
|
||||
*
|
||||
* @param {String} str
|
||||
* @return {Number}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parse(str) {
|
||||
str = String(str);
|
||||
if (str.length > 100) {
|
||||
return;
|
||||
}
|
||||
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
|
||||
str
|
||||
);
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
var n = parseFloat(match[1]);
|
||||
var type = (match[2] || 'ms').toLowerCase();
|
||||
switch (type) {
|
||||
case 'years':
|
||||
case 'year':
|
||||
case 'yrs':
|
||||
case 'yr':
|
||||
case 'y':
|
||||
return n * y;
|
||||
case 'weeks':
|
||||
case 'week':
|
||||
case 'w':
|
||||
return n * w;
|
||||
case 'days':
|
||||
case 'day':
|
||||
case 'd':
|
||||
return n * d;
|
||||
case 'hours':
|
||||
case 'hour':
|
||||
case 'hrs':
|
||||
case 'hr':
|
||||
case 'h':
|
||||
return n * h;
|
||||
case 'minutes':
|
||||
case 'minute':
|
||||
case 'mins':
|
||||
case 'min':
|
||||
case 'm':
|
||||
return n * m;
|
||||
case 'seconds':
|
||||
case 'second':
|
||||
case 'secs':
|
||||
case 'sec':
|
||||
case 's':
|
||||
return n * s;
|
||||
case 'milliseconds':
|
||||
case 'millisecond':
|
||||
case 'msecs':
|
||||
case 'msec':
|
||||
case 'ms':
|
||||
return n;
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Short format for `ms`.
|
||||
*
|
||||
* @param {Number} ms
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function fmtShort(ms) {
|
||||
var msAbs = Math.abs(ms);
|
||||
if (msAbs >= d) {
|
||||
return Math.round(ms / d) + 'd';
|
||||
}
|
||||
if (msAbs >= h) {
|
||||
return Math.round(ms / h) + 'h';
|
||||
}
|
||||
if (msAbs >= m) {
|
||||
return Math.round(ms / m) + 'm';
|
||||
}
|
||||
if (msAbs >= s) {
|
||||
return Math.round(ms / s) + 's';
|
||||
}
|
||||
return ms + 'ms';
|
||||
}
|
||||
|
||||
/**
|
||||
* Long format for `ms`.
|
||||
*
|
||||
* @param {Number} ms
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function fmtLong(ms) {
|
||||
var msAbs = Math.abs(ms);
|
||||
if (msAbs >= d) {
|
||||
return plural(ms, msAbs, d, 'day');
|
||||
}
|
||||
if (msAbs >= h) {
|
||||
return plural(ms, msAbs, h, 'hour');
|
||||
}
|
||||
if (msAbs >= m) {
|
||||
return plural(ms, msAbs, m, 'minute');
|
||||
}
|
||||
if (msAbs >= s) {
|
||||
return plural(ms, msAbs, s, 'second');
|
||||
}
|
||||
return ms + ' ms';
|
||||
}
|
||||
|
||||
/**
|
||||
* Pluralization helper.
|
||||
*/
|
||||
|
||||
function plural(ms, msAbs, n, name) {
|
||||
var isPlural = msAbs >= n * 1.5;
|
||||
return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter <i@izs.me>, James Talmage <james@talmage.io> (github.com/jamestalmage), and Contributors
|
||||
Copyright (c) 2020 Vercel, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
@ -9,13 +9,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
38
node_modules/ava/node_modules/ms/package.json
generated
vendored
Normal file
38
node_modules/ava/node_modules/ms/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
{
|
||||
"name": "ms",
|
||||
"version": "2.1.3",
|
||||
"description": "Tiny millisecond conversion utility",
|
||||
"repository": "vercel/ms",
|
||||
"main": "./index",
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"scripts": {
|
||||
"precommit": "lint-staged",
|
||||
"lint": "eslint lib/* bin/*",
|
||||
"test": "mocha tests.js"
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": "eslint:recommended",
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.js": [
|
||||
"npm run lint",
|
||||
"prettier --single-quote --write",
|
||||
"git add"
|
||||
]
|
||||
},
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"eslint": "4.18.2",
|
||||
"expect.js": "0.3.1",
|
||||
"husky": "0.14.3",
|
||||
"lint-staged": "5.0.0",
|
||||
"mocha": "4.0.1",
|
||||
"prettier": "2.0.5"
|
||||
}
|
||||
}
|
||||
59
node_modules/ava/node_modules/ms/readme.md
generated
vendored
Normal file
59
node_modules/ava/node_modules/ms/readme.md
generated
vendored
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
# ms
|
||||
|
||||

|
||||
|
||||
Use this package to easily convert various time formats to milliseconds.
|
||||
|
||||
## Examples
|
||||
|
||||
```js
|
||||
ms('2 days') // 172800000
|
||||
ms('1d') // 86400000
|
||||
ms('10h') // 36000000
|
||||
ms('2.5 hrs') // 9000000
|
||||
ms('2h') // 7200000
|
||||
ms('1m') // 60000
|
||||
ms('5s') // 5000
|
||||
ms('1y') // 31557600000
|
||||
ms('100') // 100
|
||||
ms('-3 days') // -259200000
|
||||
ms('-1h') // -3600000
|
||||
ms('-200') // -200
|
||||
```
|
||||
|
||||
### Convert from Milliseconds
|
||||
|
||||
```js
|
||||
ms(60000) // "1m"
|
||||
ms(2 * 60000) // "2m"
|
||||
ms(-3 * 60000) // "-3m"
|
||||
ms(ms('10 hours')) // "10h"
|
||||
```
|
||||
|
||||
### Time Format Written-Out
|
||||
|
||||
```js
|
||||
ms(60000, { long: true }) // "1 minute"
|
||||
ms(2 * 60000, { long: true }) // "2 minutes"
|
||||
ms(-3 * 60000, { long: true }) // "-3 minutes"
|
||||
ms(ms('10 hours'), { long: true }) // "10 hours"
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- Works both in [Node.js](https://nodejs.org) and in the browser
|
||||
- If a number is supplied to `ms`, a string with a unit is returned
|
||||
- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`)
|
||||
- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned
|
||||
|
||||
## Related Packages
|
||||
|
||||
- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time.
|
||||
|
||||
## Caught a Bug?
|
||||
|
||||
1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device
|
||||
2. Link the package to the global module directory: `npm link`
|
||||
3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms!
|
||||
|
||||
As always, you can run the tests using: `npm test`
|
||||
334
node_modules/ava/node_modules/stack-utils/index.js
generated
vendored
334
node_modules/ava/node_modules/stack-utils/index.js
generated
vendored
|
|
@ -1,334 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const escapeStringRegexp = require('escape-string-regexp');
|
||||
|
||||
const natives = [].concat(
|
||||
require('module').builtinModules,
|
||||
'bootstrap_node',
|
||||
'node',
|
||||
).map(n => new RegExp(`(?:\\(${n}\\.js:\\d+:\\d+\\)$|^\\s*at ${n}\\.js:\\d+:\\d+$)`));
|
||||
|
||||
natives.push(
|
||||
/\(internal\/[^:]+:\d+:\d+\)$/,
|
||||
/\s*at internal\/[^:]+:\d+:\d+$/,
|
||||
/\/\.node-spawn-wrap-\w+-\w+\/node:\d+:\d+\)?$/
|
||||
);
|
||||
|
||||
class StackUtils {
|
||||
constructor (opts) {
|
||||
opts = {
|
||||
ignoredPackages: [],
|
||||
...opts
|
||||
};
|
||||
|
||||
if ('internals' in opts === false) {
|
||||
opts.internals = StackUtils.nodeInternals();
|
||||
}
|
||||
|
||||
if ('cwd' in opts === false) {
|
||||
opts.cwd = process.cwd()
|
||||
}
|
||||
|
||||
this._cwd = opts.cwd.replace(/\\/g, '/');
|
||||
this._internals = [].concat(
|
||||
opts.internals,
|
||||
ignoredPackagesRegExp(opts.ignoredPackages)
|
||||
);
|
||||
|
||||
this._wrapCallSite = opts.wrapCallSite || false;
|
||||
}
|
||||
|
||||
static nodeInternals () {
|
||||
return [...natives];
|
||||
}
|
||||
|
||||
clean (stack, indent = 0) {
|
||||
indent = ' '.repeat(indent);
|
||||
|
||||
if (!Array.isArray(stack)) {
|
||||
stack = stack.split('\n');
|
||||
}
|
||||
|
||||
if (!(/^\s*at /.test(stack[0])) && (/^\s*at /.test(stack[1]))) {
|
||||
stack = stack.slice(1);
|
||||
}
|
||||
|
||||
let outdent = false;
|
||||
let lastNonAtLine = null;
|
||||
const result = [];
|
||||
|
||||
stack.forEach(st => {
|
||||
st = st.replace(/\\/g, '/');
|
||||
|
||||
if (this._internals.some(internal => internal.test(st))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const isAtLine = /^\s*at /.test(st);
|
||||
|
||||
if (outdent) {
|
||||
st = st.trimEnd().replace(/^(\s+)at /, '$1');
|
||||
} else {
|
||||
st = st.trim();
|
||||
if (isAtLine) {
|
||||
st = st.slice(3);
|
||||
}
|
||||
}
|
||||
|
||||
st = st.replace(`${this._cwd}/`, '');
|
||||
|
||||
if (st) {
|
||||
if (isAtLine) {
|
||||
if (lastNonAtLine) {
|
||||
result.push(lastNonAtLine);
|
||||
lastNonAtLine = null;
|
||||
}
|
||||
|
||||
result.push(st);
|
||||
} else {
|
||||
outdent = true;
|
||||
lastNonAtLine = st;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result.map(line => `${indent}${line}\n`).join('');
|
||||
}
|
||||
|
||||
captureString (limit, fn = this.captureString) {
|
||||
if (typeof limit === 'function') {
|
||||
fn = limit;
|
||||
limit = Infinity;
|
||||
}
|
||||
|
||||
const {stackTraceLimit} = Error;
|
||||
if (limit) {
|
||||
Error.stackTraceLimit = limit;
|
||||
}
|
||||
|
||||
const obj = {};
|
||||
|
||||
Error.captureStackTrace(obj, fn);
|
||||
const {stack} = obj;
|
||||
Error.stackTraceLimit = stackTraceLimit;
|
||||
|
||||
return this.clean(stack);
|
||||
}
|
||||
|
||||
capture (limit, fn = this.capture) {
|
||||
if (typeof limit === 'function') {
|
||||
fn = limit;
|
||||
limit = Infinity;
|
||||
}
|
||||
|
||||
const {prepareStackTrace, stackTraceLimit} = Error;
|
||||
Error.prepareStackTrace = (obj, site) => {
|
||||
if (this._wrapCallSite) {
|
||||
return site.map(this._wrapCallSite);
|
||||
}
|
||||
|
||||
return site;
|
||||
};
|
||||
|
||||
if (limit) {
|
||||
Error.stackTraceLimit = limit;
|
||||
}
|
||||
|
||||
const obj = {};
|
||||
Error.captureStackTrace(obj, fn);
|
||||
const { stack } = obj;
|
||||
Object.assign(Error, {prepareStackTrace, stackTraceLimit});
|
||||
|
||||
return stack;
|
||||
}
|
||||
|
||||
at (fn = this.at) {
|
||||
const [site] = this.capture(1, fn);
|
||||
|
||||
if (!site) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const res = {
|
||||
line: site.getLineNumber(),
|
||||
column: site.getColumnNumber()
|
||||
};
|
||||
|
||||
setFile(res, site.getFileName(), this._cwd);
|
||||
|
||||
if (site.isConstructor()) {
|
||||
res.constructor = true;
|
||||
}
|
||||
|
||||
if (site.isEval()) {
|
||||
res.evalOrigin = site.getEvalOrigin();
|
||||
}
|
||||
|
||||
// Node v10 stopped with the isNative() on callsites, apparently
|
||||
/* istanbul ignore next */
|
||||
if (site.isNative()) {
|
||||
res.native = true;
|
||||
}
|
||||
|
||||
let typename;
|
||||
try {
|
||||
typename = site.getTypeName();
|
||||
} catch (_) {
|
||||
}
|
||||
|
||||
if (typename && typename !== 'Object' && typename !== '[object Object]') {
|
||||
res.type = typename;
|
||||
}
|
||||
|
||||
const fname = site.getFunctionName();
|
||||
if (fname) {
|
||||
res.function = fname;
|
||||
}
|
||||
|
||||
const meth = site.getMethodName();
|
||||
if (meth && fname !== meth) {
|
||||
res.method = meth;
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
parseLine (line) {
|
||||
const match = line && line.match(re);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const ctor = match[1] === 'new';
|
||||
let fname = match[2];
|
||||
const evalOrigin = match[3];
|
||||
const evalFile = match[4];
|
||||
const evalLine = Number(match[5]);
|
||||
const evalCol = Number(match[6]);
|
||||
let file = match[7];
|
||||
const lnum = match[8];
|
||||
const col = match[9];
|
||||
const native = match[10] === 'native';
|
||||
const closeParen = match[11] === ')';
|
||||
let method;
|
||||
|
||||
const res = {};
|
||||
|
||||
if (lnum) {
|
||||
res.line = Number(lnum);
|
||||
}
|
||||
|
||||
if (col) {
|
||||
res.column = Number(col);
|
||||
}
|
||||
|
||||
if (closeParen && file) {
|
||||
// make sure parens are balanced
|
||||
// if we have a file like "asdf) [as foo] (xyz.js", then odds are
|
||||
// that the fname should be += " (asdf) [as foo]" and the file
|
||||
// should be just "xyz.js"
|
||||
// walk backwards from the end to find the last unbalanced (
|
||||
let closes = 0;
|
||||
for (let i = file.length - 1; i > 0; i--) {
|
||||
if (file.charAt(i) === ')') {
|
||||
closes++;
|
||||
} else if (file.charAt(i) === '(' && file.charAt(i - 1) === ' ') {
|
||||
closes--;
|
||||
if (closes === -1 && file.charAt(i - 1) === ' ') {
|
||||
const before = file.slice(0, i - 1);
|
||||
const after = file.slice(i + 1);
|
||||
file = after;
|
||||
fname += ` (${before}`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fname) {
|
||||
const methodMatch = fname.match(methodRe);
|
||||
if (methodMatch) {
|
||||
fname = methodMatch[1];
|
||||
method = methodMatch[2];
|
||||
}
|
||||
}
|
||||
|
||||
setFile(res, file, this._cwd);
|
||||
|
||||
if (ctor) {
|
||||
res.constructor = true;
|
||||
}
|
||||
|
||||
if (evalOrigin) {
|
||||
res.evalOrigin = evalOrigin;
|
||||
res.evalLine = evalLine;
|
||||
res.evalColumn = evalCol;
|
||||
res.evalFile = evalFile && evalFile.replace(/\\/g, '/');
|
||||
}
|
||||
|
||||
if (native) {
|
||||
res.native = true;
|
||||
}
|
||||
|
||||
if (fname) {
|
||||
res.function = fname;
|
||||
}
|
||||
|
||||
if (method && fname !== method) {
|
||||
res.method = method;
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
function setFile (result, filename, cwd) {
|
||||
if (filename) {
|
||||
filename = filename.replace(/\\/g, '/');
|
||||
if (filename.startsWith(`${cwd}/`)) {
|
||||
filename = filename.slice(cwd.length + 1);
|
||||
}
|
||||
|
||||
result.file = filename;
|
||||
}
|
||||
}
|
||||
|
||||
function ignoredPackagesRegExp(ignoredPackages) {
|
||||
if (ignoredPackages.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const packages = ignoredPackages.map(mod => escapeStringRegexp(mod));
|
||||
|
||||
return new RegExp(`[\/\\\\]node_modules[\/\\\\](?:${packages.join('|')})[\/\\\\][^:]+:\\d+:\\d+`)
|
||||
}
|
||||
|
||||
const re = new RegExp(
|
||||
'^' +
|
||||
// Sometimes we strip out the ' at' because it's noisy
|
||||
'(?:\\s*at )?' +
|
||||
// $1 = ctor if 'new'
|
||||
'(?:(new) )?' +
|
||||
// $2 = function name (can be literally anything)
|
||||
// May contain method at the end as [as xyz]
|
||||
'(?:(.*?) \\()?' +
|
||||
// (eval at <anonymous> (file.js:1:1),
|
||||
// $3 = eval origin
|
||||
// $4:$5:$6 are eval file/line/col, but not normally reported
|
||||
'(?:eval at ([^ ]+) \\((.+?):(\\d+):(\\d+)\\), )?' +
|
||||
// file:line:col
|
||||
// $7:$8:$9
|
||||
// $10 = 'native' if native
|
||||
'(?:(.+?):(\\d+):(\\d+)|(native))' +
|
||||
// maybe close the paren, then end
|
||||
// if $11 is ), then we only allow balanced parens in the filename
|
||||
// any imbalance is placed on the fname. This is a heuristic, and
|
||||
// bound to be incorrect in some edge cases. The bet is that
|
||||
// having weird characters in method names is more common than
|
||||
// having weird characters in filenames, which seems reasonable.
|
||||
'(\\)?)$'
|
||||
);
|
||||
|
||||
const methodRe = /^(.*?) \[as (.*?)\]$/;
|
||||
|
||||
module.exports = StackUtils;
|
||||
35
node_modules/ava/node_modules/stack-utils/package.json
generated
vendored
35
node_modules/ava/node_modules/stack-utils/package.json
generated
vendored
|
|
@ -1,35 +0,0 @@
|
|||
{
|
||||
"name": "stack-utils",
|
||||
"version": "2.0.2",
|
||||
"description": "Captures and cleans stack traces",
|
||||
"license": "MIT",
|
||||
"repository": "tapjs/stack-utils",
|
||||
"author": {
|
||||
"name": "James Talmage",
|
||||
"email": "james@talmage.io",
|
||||
"url": "github.com/jamestalmage"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap --no-esm --100",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"prepublishOnly": "git push origin --follow-tags"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"dependencies": {
|
||||
"escape-string-regexp": "^2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"bluebird": "^3.7.2",
|
||||
"coveralls": "^3.0.9",
|
||||
"nested-error-stacks": "^2.1.0",
|
||||
"pify": "^4.0.1",
|
||||
"q": "^1.5.1",
|
||||
"tap": "=14.10.2-unbundled"
|
||||
}
|
||||
}
|
||||
143
node_modules/ava/node_modules/stack-utils/readme.md
generated
vendored
143
node_modules/ava/node_modules/stack-utils/readme.md
generated
vendored
|
|
@ -1,143 +0,0 @@
|
|||
# stack-utils
|
||||
|
||||
> Captures and cleans stack traces.
|
||||
|
||||
[](https://travis-ci.org/tapjs/stack-utils) [](https://ci.appveyor.com/project/jamestalmage/stack-utils-oiw96/branch/master) [](https://coveralls.io/github/tapjs/stack-utils?branch=master)
|
||||
|
||||
|
||||
Extracted from `lib/stack.js` in the [`node-tap` project](https://github.com/tapjs/node-tap)
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save stack-utils
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const StackUtils = require('stack-utils');
|
||||
const stack = new StackUtils({cwd: process.cwd(), internals: StackUtils.nodeInternals()});
|
||||
|
||||
console.log(stack.clean(new Error().stack));
|
||||
// outputs a beautified stack trace
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
|
||||
### new StackUtils([options])
|
||||
|
||||
Creates a new `stackUtils` instance.
|
||||
|
||||
#### options
|
||||
|
||||
##### internals
|
||||
|
||||
Type: `array` of `RegularExpression`s
|
||||
|
||||
A set of regular expressions that match internal stack stack trace lines which should be culled from the stack trace.
|
||||
The default is `StackUtils.nodeInternals()`, this can be disabled by setting `[]` or appended using
|
||||
`StackUtils.nodeInternals().concat(additionalRegExp)`. See also `ignoredPackages`.
|
||||
|
||||
##### ignoredPackages
|
||||
|
||||
Type: `array` of `string`s
|
||||
|
||||
An array of npm modules to be culled from the stack trace. This list will mapped to regular
|
||||
expressions and merged with the `internals`.
|
||||
|
||||
Default `''`.
|
||||
|
||||
##### cwd
|
||||
|
||||
Type: `string`
|
||||
|
||||
The path to the current working directory. File names in the stack trace will be shown relative to this directory.
|
||||
|
||||
##### wrapCallSite
|
||||
|
||||
Type: `function(CallSite)`
|
||||
|
||||
A mapping function for manipulating CallSites before processing. The first argument is a CallSite instance, and the function should return a modified CallSite. This is useful for providing source map support.
|
||||
|
||||
|
||||
### StackUtils.nodeInternals()
|
||||
|
||||
Returns an array of regular expressions that be used to cull lines from the stack trace that reference common Node.js internal files.
|
||||
|
||||
|
||||
### stackUtils.clean(stack, indent = 0)
|
||||
|
||||
Cleans up a stack trace by deleting any lines that match the `internals` passed to the constructor, and shortening file names relative to `cwd`.
|
||||
|
||||
Returns a `string` with the cleaned up stack (always terminated with a `\n` newline character).
|
||||
Spaces at the start of each line are trimmed, indentation can be added by setting `indent` to the desired number of spaces.
|
||||
|
||||
#### stack
|
||||
|
||||
*Required*
|
||||
Type: `string` or an `array` of `string`s
|
||||
|
||||
|
||||
### stackUtils.capture([limit], [startStackFunction])
|
||||
|
||||
Captures the current stack trace, returning an array of `CallSite`s. There are good overviews of the available CallSite methods [here](https://github.com/v8/v8/wiki/Stack%20Trace%20API#customizing-stack-traces), and [here](https://github.com/sindresorhus/callsites#api).
|
||||
|
||||
#### limit
|
||||
|
||||
Type: `number`
|
||||
Default: `Infinity`
|
||||
|
||||
Limits the number of lines returned by dropping all lines in excess of the limit. This removes lines from the stack trace.
|
||||
|
||||
#### startStackFunction
|
||||
|
||||
Type: `function`
|
||||
|
||||
The function where the stack trace should start. The first line of the stack trace will be the function that called `startStackFunction`. This removes lines from the end of the stack trace.
|
||||
|
||||
|
||||
### stackUtils.captureString([limit], [startStackFunction])
|
||||
|
||||
Captures the current stack trace, cleans it using `stackUtils.clean(stack)`, and returns a string with the cleaned stack trace. It takes the same arguments as `stackUtils.capture`.
|
||||
|
||||
|
||||
### stackUtils.at([startStackFunction])
|
||||
|
||||
Captures the first line of the stack trace (or the first line after `startStackFunction` if supplied), and returns a `CallSite` like object that is serialization friendly (properties are actual values instead of getter functions).
|
||||
|
||||
The available properties are:
|
||||
|
||||
- `line`: `number`
|
||||
- `column`: `number`
|
||||
- `file`: `string`
|
||||
- `constructor`: `boolean`
|
||||
- `evalOrigin`: `string`
|
||||
- `native`: `boolean`
|
||||
- `type`: `string`
|
||||
- `function`: `string`
|
||||
- `method`: `string`
|
||||
|
||||
### stackUtils.parseLine(line)
|
||||
|
||||
Parses a `string` (which should be a single line from a stack trace), and generates an object with the following properties:
|
||||
|
||||
- `line`: `number`
|
||||
- `column`: `number`
|
||||
- `file`: `string`
|
||||
- `constructor`: `boolean`
|
||||
- `evalOrigin`: `string`
|
||||
- `evalLine`: `number`
|
||||
- `evalColumn`: `number`
|
||||
- `evalFile`: `string`
|
||||
- `native`: `boolean`
|
||||
- `function`: `string`
|
||||
- `method`: `string`
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Isaac Z. Schlueter](http://github.com/isaacs), [James Talmage](http://github.com/jamestalmage)
|
||||
6
node_modules/ava/node_modules/supports-color/index.js
generated
vendored
6
node_modules/ava/node_modules/supports-color/index.js
generated
vendored
|
|
@ -81,7 +81,7 @@ function supportsColor(haveStream, streamIsTTY) {
|
|||
}
|
||||
|
||||
if ('CI' in env) {
|
||||
if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
|
||||
if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
|
@ -92,10 +92,6 @@ function supportsColor(haveStream, streamIsTTY) {
|
|||
return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
|
||||
}
|
||||
|
||||
if ('GITHUB_ACTIONS' in env) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (env.COLORTERM === 'truecolor') {
|
||||
return 3;
|
||||
}
|
||||
|
|
|
|||
2
node_modules/ava/node_modules/supports-color/package.json
generated
vendored
2
node_modules/ava/node_modules/supports-color/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "supports-color",
|
||||
"version": "7.1.0",
|
||||
"version": "7.2.0",
|
||||
"description": "Detect whether a terminal supports color",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/supports-color",
|
||||
|
|
|
|||
93
node_modules/ava/package.json
generated
vendored
93
node_modules/ava/package.json
generated
vendored
|
|
@ -1,22 +1,24 @@
|
|||
{
|
||||
"name": "ava",
|
||||
"version": "3.8.1",
|
||||
"description": "Testing can be a drag. AVA helps you get it done.",
|
||||
"version": "3.15.0",
|
||||
"description": "Node.js test runner that lets you develop with confidence.",
|
||||
"license": "MIT",
|
||||
"repository": "avajs/ava",
|
||||
"homepage": "https://avajs.dev",
|
||||
"bin": "cli.js",
|
||||
"engines": {
|
||||
"node": ">=10.18.0 <11 || >=12.14.0 <13 || >=13.5.0 <14 || >=14.0.0"
|
||||
"node": ">=10.18.0 <11 || >=12.14.0 <12.17.0 || >=12.17.0 <13 || >=14.0.0 <15 || >=15"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && tsd && c8 tap"
|
||||
"cover": "c8 --report=none tap && c8 --report=none --no-clean test-ava && c8 report",
|
||||
"test": "xo && tsd && npm run -s cover"
|
||||
},
|
||||
"files": [
|
||||
"lib",
|
||||
"*.js",
|
||||
"!*.config.js",
|
||||
"index.d.ts"
|
||||
"index.d.ts",
|
||||
"*.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"🦄",
|
||||
|
|
@ -56,85 +58,88 @@
|
|||
],
|
||||
"dependencies": {
|
||||
"@concordance/react": "^2.0.0",
|
||||
"acorn": "^7.1.1",
|
||||
"acorn-walk": "^7.1.1",
|
||||
"ansi-styles": "^4.2.1",
|
||||
"acorn": "^8.0.4",
|
||||
"acorn-walk": "^8.0.0",
|
||||
"ansi-styles": "^5.0.0",
|
||||
"arrgv": "^1.0.2",
|
||||
"arrify": "^2.0.1",
|
||||
"callsites": "^3.1.0",
|
||||
"chalk": "^4.0.0",
|
||||
"chokidar": "^3.4.0",
|
||||
"chalk": "^4.1.0",
|
||||
"chokidar": "^3.4.3",
|
||||
"chunkd": "^2.0.1",
|
||||
"ci-info": "^2.0.0",
|
||||
"ci-parallel-vars": "^1.0.0",
|
||||
"ci-parallel-vars": "^1.0.1",
|
||||
"clean-yaml-object": "^0.1.0",
|
||||
"cli-cursor": "^3.1.0",
|
||||
"cli-truncate": "^2.1.0",
|
||||
"code-excerpt": "^2.1.1",
|
||||
"code-excerpt": "^3.0.0",
|
||||
"common-path-prefix": "^3.0.0",
|
||||
"concordance": "^4.0.0",
|
||||
"concordance": "^5.0.1",
|
||||
"convert-source-map": "^1.7.0",
|
||||
"currently-unhandled": "^0.4.1",
|
||||
"debug": "^4.1.1",
|
||||
"del": "^5.1.0",
|
||||
"emittery": "^0.6.0",
|
||||
"debug": "^4.3.1",
|
||||
"del": "^6.0.0",
|
||||
"emittery": "^0.8.0",
|
||||
"equal-length": "^1.0.0",
|
||||
"figures": "^3.2.0",
|
||||
"globby": "^11.0.0",
|
||||
"ignore-by-default": "^1.0.0",
|
||||
"globby": "^11.0.1",
|
||||
"ignore-by-default": "^2.0.0",
|
||||
"import-local": "^3.0.2",
|
||||
"indent-string": "^4.0.0",
|
||||
"is-error": "^2.2.2",
|
||||
"is-plain-object": "^3.0.0",
|
||||
"is-promise": "^3.0.0",
|
||||
"lodash": "^4.17.15",
|
||||
"is-plain-object": "^5.0.0",
|
||||
"is-promise": "^4.0.0",
|
||||
"lodash": "^4.17.20",
|
||||
"matcher": "^3.0.0",
|
||||
"md5-hex": "^3.0.1",
|
||||
"mem": "^6.1.0",
|
||||
"ms": "^2.1.2",
|
||||
"ora": "^4.0.4",
|
||||
"mem": "^8.0.0",
|
||||
"ms": "^2.1.3",
|
||||
"ora": "^5.2.0",
|
||||
"p-event": "^4.2.0",
|
||||
"p-map": "^4.0.0",
|
||||
"picomatch": "^2.2.2",
|
||||
"pkg-conf": "^3.1.0",
|
||||
"plur": "^4.0.0",
|
||||
"pretty-ms": "^6.0.1",
|
||||
"pretty-ms": "^7.0.1",
|
||||
"read-pkg": "^5.2.0",
|
||||
"resolve-cwd": "^3.0.0",
|
||||
"slash": "^3.0.0",
|
||||
"source-map-support": "^0.5.19",
|
||||
"stack-utils": "^2.0.1",
|
||||
"stack-utils": "^2.0.3",
|
||||
"strip-ansi": "^6.0.0",
|
||||
"supertap": "^1.0.0",
|
||||
"supertap": "^2.0.0",
|
||||
"temp-dir": "^2.0.0",
|
||||
"trim-off-newlines": "^1.0.1",
|
||||
"update-notifier": "^4.1.0",
|
||||
"update-notifier": "^5.0.1",
|
||||
"write-file-atomic": "^3.0.3",
|
||||
"yargs": "^15.3.1"
|
||||
"yargs": "^16.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ava/babel": "^1.0.1",
|
||||
"@babel/plugin-proposal-do-expressions": "^7.8.3",
|
||||
"@ava/test": "github:avajs/test",
|
||||
"@babel/plugin-proposal-do-expressions": "^7.12.1",
|
||||
"@sinonjs/fake-timers": "^6.0.1",
|
||||
"ansi-escapes": "^4.3.1",
|
||||
"c8": "^7.1.0",
|
||||
"delay": "^4.3.0",
|
||||
"c8": "^7.4.0",
|
||||
"delay": "^4.4.0",
|
||||
"esm": "^3.2.25",
|
||||
"execa": "^4.0.0",
|
||||
"get-stream": "^5.1.0",
|
||||
"p-event": "^4.1.0",
|
||||
"execa": "^5.0.0",
|
||||
"fs-extra": "^9.0.1",
|
||||
"get-stream": "^6.0.0",
|
||||
"it-first": "^1.0.4",
|
||||
"proxyquire": "^2.1.3",
|
||||
"react": "^16.13.1",
|
||||
"react-test-renderer": "^16.13.1",
|
||||
"replace-string": "^3.0.0",
|
||||
"sinon": "^9.0.2",
|
||||
"react": "^16.14.0",
|
||||
"react-test-renderer": "^16.14.0",
|
||||
"replace-string": "^3.1.0",
|
||||
"sinon": "^9.2.2",
|
||||
"source-map-fixtures": "^2.1.0",
|
||||
"tap": "^14.10.7",
|
||||
"tap": "^14.11.0",
|
||||
"temp-write": "^4.0.0",
|
||||
"tempy": "^0.5.0",
|
||||
"tempy": "^1.0.0",
|
||||
"touch": "^3.1.0",
|
||||
"tsd": "^0.11.0",
|
||||
"typescript": "^3.8.3",
|
||||
"xo": "^0.30.0",
|
||||
"tsd": "^0.14.0",
|
||||
"typescript": "^4.1.3",
|
||||
"xo": "^0.36.1",
|
||||
"zen-observable": "^0.8.15"
|
||||
}
|
||||
}
|
||||
79
node_modules/ava/plugin.d.ts
generated
vendored
Normal file
79
node_modules/ava/plugin.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
export namespace SharedWorker {
|
||||
export type ProtocolIdentifier = 'experimental';
|
||||
|
||||
export type FactoryOptions = {
|
||||
negotiateProtocol <Data = unknown>(supported: readonly ['experimental']): Experimental.Protocol<Data>;
|
||||
// Add overloads for additional protocols.
|
||||
};
|
||||
|
||||
export type Factory = (options: FactoryOptions) => void;
|
||||
|
||||
export namespace Experimental {
|
||||
export type Protocol<Data = unknown> = {
|
||||
readonly initialData: Data;
|
||||
readonly protocol: 'experimental';
|
||||
broadcast: (data: Data) => BroadcastMessage<Data>;
|
||||
ready: () => Protocol<Data>;
|
||||
subscribe: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
testWorkers: () => AsyncIterableIterator<TestWorker<Data>>;
|
||||
};
|
||||
|
||||
export type BroadcastMessage<Data = unknown> = {
|
||||
readonly id: string;
|
||||
replies: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
};
|
||||
|
||||
export type PublishedMessage<Data = unknown> = {
|
||||
readonly id: string;
|
||||
replies: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
};
|
||||
|
||||
export type ReceivedMessage<Data = unknown> = {
|
||||
readonly data: Data;
|
||||
readonly id: string;
|
||||
readonly testWorker: TestWorker;
|
||||
reply: (data: Data) => PublishedMessage<Data>;
|
||||
};
|
||||
|
||||
export type TestWorker<Data = unknown> = {
|
||||
readonly id: string;
|
||||
readonly file: string;
|
||||
publish: (data: Data) => PublishedMessage<Data>;
|
||||
subscribe: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
teardown: <TeardownFn extends () => void> (fn: TeardownFn) => TeardownFn;
|
||||
};
|
||||
}
|
||||
|
||||
export namespace Plugin {
|
||||
export type RegistrationOptions<Identifier extends ProtocolIdentifier, Data = unknown> = {
|
||||
readonly filename: string;
|
||||
readonly initialData?: Data;
|
||||
readonly supportedProtocols: readonly Identifier[];
|
||||
readonly teardown?: () => void;
|
||||
};
|
||||
|
||||
export namespace Experimental {
|
||||
export type Protocol<Data = unknown> = {
|
||||
readonly available: Promise<void>;
|
||||
readonly currentlyAvailable: boolean;
|
||||
readonly protocol: 'experimental';
|
||||
publish: (data: Data) => PublishedMessage<Data>;
|
||||
subscribe: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
};
|
||||
|
||||
export type PublishedMessage<Data = unknown> = {
|
||||
readonly id: string;
|
||||
replies: () => AsyncIterableIterator<ReceivedMessage<Data>>;
|
||||
};
|
||||
|
||||
export type ReceivedMessage<Data = unknown> = {
|
||||
readonly data: Data;
|
||||
readonly id: string;
|
||||
reply: (data: Data) => PublishedMessage<Data>;
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function registerSharedWorker<Data = unknown>(options: SharedWorker.Plugin.RegistrationOptions<'experimental', Data>): SharedWorker.Plugin.Experimental.Protocol<Data>;
|
||||
// Add overloads for additional protocols.
|
||||
9
node_modules/ava/plugin.js
generated
vendored
Normal file
9
node_modules/ava/plugin.js
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
|
||||
// Ensure the same AVA install is loaded by the test file as by the test worker
|
||||
if (process.env.AVA_PATH && process.env.AVA_PATH !== __dirname) {
|
||||
module.exports = require(path.join(process.env.AVA_PATH, 'plugin'));
|
||||
} else {
|
||||
module.exports = require('./lib/worker/plugin');
|
||||
}
|
||||
18
node_modules/ava/readme.md
generated
vendored
18
node_modules/ava/readme.md
generated
vendored
|
|
@ -1,9 +1,6 @@
|
|||
# <img src="media/header.png" title="AVA" alt="AVA logo" width="530">
|
||||
|
||||
[](https://travis-ci.org/avajs/ava) [](https://codecov.io/gh/avajs/ava/branch/master) [](https://github.com/xojs/xo) [](https://spectrum.chat/ava)
|
||||
[](https://github.com/sindresorhus/awesome-nodejs)
|
||||
|
||||
Testing can be a drag. AVA helps you get it done. AVA is a test runner for Node.js with a concise API, detailed error output, embrace of new language features and process isolation that let you write tests more effectively. So you can ship more awesome code. 🚀
|
||||
AVA is a test runner for Node.js with a concise API, detailed error output, embrace of new language features and process isolation that lets you develop with confidence 🚀
|
||||
|
||||
Follow the [AVA Twitter account](https://twitter.com/ava__js) for updates.
|
||||
|
||||
|
|
@ -143,6 +140,7 @@ We have a growing list of [common pitfalls](docs/08-common-pitfalls.md) you may
|
|||
|
||||
### Recipes
|
||||
|
||||
- [Shared workers](docs/recipes/shared-workers.md)
|
||||
- [Test setup](docs/recipes/test-setup.md)
|
||||
- [Code coverage](docs/recipes/code-coverage.md)
|
||||
- [Watch mode](docs/recipes/watch-mode.md)
|
||||
|
|
@ -187,9 +185,7 @@ It's the [Andromeda galaxy](https://simple.wikipedia.org/wiki/Andromeda_galaxy).
|
|||
|
||||
## Support
|
||||
|
||||
- [Stack Overflow](https://stackoverflow.com/questions/tagged/ava)
|
||||
- [Spectrum](https://spectrum.chat/ava)
|
||||
- [Twitter](https://twitter.com/ava__js)
|
||||
- [GitHub Discussions](https://github.com/avajs/ava/discussions)
|
||||
|
||||
## Related
|
||||
|
||||
|
|
@ -205,15 +201,14 @@ It's the [Andromeda galaxy](https://simple.wikipedia.org/wiki/Andromeda_galaxy).
|
|||
|
||||
- [AVA stickers, t-shirts, etc](https://www.redbubble.com/people/sindresorhus/works/30330590-ava-logo)
|
||||
- [Awesome list](https://github.com/avajs/awesome-ava)
|
||||
- [AVA Casts](http://avacasts.com)
|
||||
- [Do you like AVA? Donate here!](https://opencollective.com/ava)
|
||||
- [More…](https://github.com/avajs/awesome-ava)
|
||||
|
||||
## Team
|
||||
|
||||
[](https://github.com/novemberborn) | [](https://github.com/sindresorhus) | [](https://github.com/vadimdemedes)
|
||||
---|---|---
|
||||
[Mark Wubben](https://novemberborn.net) | [Sindre Sorhus](https://sindresorhus.com) | [Vadim Demedes](https://github.com/vadimdemedes)
|
||||
[](https://github.com/novemberborn) | [](https://github.com/sindresorhus)
|
||||
---|---
|
||||
[Mark Wubben](https://novemberborn.net) | [Sindre Sorhus](https://sindresorhus.com)
|
||||
|
||||
###### Former
|
||||
|
||||
|
|
@ -221,6 +216,7 @@ It's the [Andromeda galaxy](https://simple.wikipedia.org/wiki/Andromeda_galaxy).
|
|||
- [James Talmage](https://github.com/jamestalmage)
|
||||
- [Juan Soto](https://github.com/sotojuan)
|
||||
- [Jeroen Engels](https://github.com/jfmengels)
|
||||
- [Vadim Demedes](https://github.com/vadimdemedes)
|
||||
|
||||
|
||||
<div align="center">
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue