replace jest with ava
This commit is contained in:
parent
27cc8b23fe
commit
0347b72305
11775 changed files with 84546 additions and 1440575 deletions
275
node_modules/ava/lib/api.js
generated
vendored
Normal file
275
node_modules/ava/lib/api.js
generated
vendored
Normal file
|
|
@ -0,0 +1,275 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const commonPathPrefix = require('common-path-prefix');
|
||||
const resolveCwd = require('resolve-cwd');
|
||||
const debounce = require('lodash/debounce');
|
||||
const arrify = require('arrify');
|
||||
const ms = require('ms');
|
||||
const chunkd = require('chunkd');
|
||||
const Emittery = require('emittery');
|
||||
const pMap = require('p-map');
|
||||
const tempDir = require('temp-dir');
|
||||
const globs = require('./globs');
|
||||
const isCi = require('./is-ci');
|
||||
const RunStatus = require('./run-status');
|
||||
const fork = require('./fork');
|
||||
const serializeError = require('./serialize-error');
|
||||
const {getApplicableLineNumbers} = require('./line-numbers');
|
||||
|
||||
// Resolve every configured `require` entry to an absolute module path,
// relative to the current working directory. Accepts a single name or an
// array (normalized via `arrify`). Throws when a name cannot be resolved so
// configuration mistakes surface before any test file is run.
function resolveModules(modules) {
	const resolved = [];
	for (const name of arrify(modules)) {
		const modulePath = resolveCwd.silent(name);
		if (modulePath === undefined) {
			throw new Error(`Could not resolve required module ’${name}’`);
		}

		resolved.push(modulePath);
	}

	return resolved;
}
|
||||
|
||||
// Compute the common path prefix of the selected test files, which reporters
// strip when displaying file names.
function getFilePathPrefix(files) {
	if (files.length !== 1) {
		return commonPathPrefix(files);
	}

	// A lone file shares no prefix with anything else, so derive the prefix
	// up to its basename by comparing the file with its own directory.
	const [onlyFile] = files;
	return commonPathPrefix([onlyFile, path.dirname(onlyFile)]);
}
|
||||
|
||||
/**
 * Coordinates a test run: discovers test files, forks one worker process per
 * file (bounded by a concurrency limit), relays worker events into a
 * `RunStatus`, and enforces the inactivity timeout, fail-fast bailing and
 * SIGINT interruption. Emits a `run` event (via Emittery) so reporters can
 * attach to the run before any worker starts.
 */
class Api extends Emittery {
	/**
	 * @param {Object} options - Resolved AVA configuration. `options.require`
	 * entries are resolved to absolute paths eagerly, so a bad module name
	 * fails construction rather than every worker.
	 */
	constructor(options) {
		super();

		this.options = {match: [], moduleTypes: {}, ...options};
		this.options.require = resolveModules(this.options.require);

		this._cacheDir = null;
		// Placeholder; replaced with a real handler at the start of each run().
		this._interruptHandler = () => {};

		// Only take over SIGINT when AVA owns the process (started by its CLI).
		if (options.ranFromCli) {
			process.on('SIGINT', () => this._interruptHandler());
		}
	}

	/**
	 * Runs the given test files (or all discovered files when none are
	 * selected) and resolves with the run's `RunStatus`. Never rejects:
	 * setup/glob errors and internal errors are reported through the status
	 * object as state changes instead.
	 *
	 * @param {Object} [params]
	 * @param {string[]} [params.files] - Pre-selected test files; empty means "discover".
	 * @param {Array} [params.filter] - CLI file/line-number filter patterns.
	 * @param {Object} [params.runtimeOptions] - Per-run flags (watch-mode state,
	 * snapshot updating, exclusive-only, previous failure counts, …).
	 * @returns {Promise<RunStatus>}
	 */
	async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) {
		let setupOrGlobError;

		const apiOptions = this.options;

		// Each run will have its own status. It can only be created when test files
		// have been found.
		let runStatus;
		// Irrespectively, perform some setup now, before finding test files.

		// Track active forks and manage timeouts.
		const failFast = apiOptions.failFast === true;
		let bailed = false;
		const pendingWorkers = new Set();
		const timedOutWorkerFiles = new Set();
		let restartTimer;
		if (apiOptions.timeout && !apiOptions.debug) {
			const timeout = ms(apiOptions.timeout);

			// Debounced: calling restartTimer() on worker activity postpones the
			// timeout; it only fires after `timeout` ms of complete inactivity.
			restartTimer = debounce(() => {
				// If failFast is active, prevent new test files from running after
				// the current ones are exited.
				if (failFast) {
					bailed = true;
				}

				runStatus.emitStateChange({type: 'timeout', period: timeout});

				for (const worker of pendingWorkers) {
					timedOutWorkerFiles.add(worker.file);
					worker.exit();
				}
			}, timeout);
		} else {
			// No timeout configured (or debugging): use a no-op with the same
			// interface so callers can invoke restartTimer()/cancel() unconditionally.
			restartTimer = Object.assign(() => {}, {cancel() {}});
		}

		// Installed behind the SIGINT listener registered in the constructor.
		this._interruptHandler = () => {
			if (bailed) {
				// Exiting already
				return;
			}

			// Prevent new test files from running
			bailed = true;

			// Make sure we don't run the timeout handler
			restartTimer.cancel();

			runStatus.emitStateChange({type: 'interrupt'});

			for (const worker of pendingWorkers) {
				worker.exit();
			}
		};

		let cacheDir;
		let testFiles;
		try {
			cacheDir = this._createCacheDir();
			testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
			if (selectedFiles.length === 0) {
				if (filter.length === 0) {
					selectedFiles = testFiles;
				} else {
					selectedFiles = globs.applyTestFileFilter({
						cwd: this.options.projectDir,
						filter: filter.map(({pattern}) => pattern),
						testFiles
					});
				}
			}
		} catch (error) {
			// Defer reporting: the error is rethrown below, after the `run` event
			// has been emitted, so reporters are attached when it is reported.
			selectedFiles = [];
			setupOrGlobError = error;
		}

		try {
			if (this.options.parallelRuns) {
				const {currentIndex, totalRuns} = this.options.parallelRuns;
				const fileCount = selectedFiles.length;

				// The files must be in the same order across all runs, so sort them.
				selectedFiles = selectedFiles.sort((a, b) => a.localeCompare(b, [], {numeric: true}));
				selectedFiles = chunkd(selectedFiles, currentIndex, totalRuns);

				const currentFileCount = selectedFiles.length;

				runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns});
			} else {
				runStatus = new RunStatus(selectedFiles.length, null);
			}

			// Debugging requires exactly one selected file to attach to.
			const debugWithoutSpecificFile = Boolean(this.options.debug) && selectedFiles.length !== 1;

			await this.emit('run', {
				bailWithoutReporting: debugWithoutSpecificFile,
				clearLogOnNextRun: runtimeOptions.clearLogOnNextRun === true,
				debug: Boolean(this.options.debug),
				failFastEnabled: failFast,
				filePathPrefix: getFilePathPrefix(selectedFiles),
				files: selectedFiles,
				matching: apiOptions.match.length > 0,
				previousFailures: runtimeOptions.previousFailures || 0,
				runOnlyExclusive: runtimeOptions.runOnlyExclusive === true,
				runVector: runtimeOptions.runVector || 0,
				status: runStatus
			});

			if (setupOrGlobError) {
				throw setupOrGlobError;
			}

			// Bail out early if no files were found, or when debugging and there is not a single specific test file to debug.
			if (selectedFiles.length === 0 || debugWithoutSpecificFile) {
				return runStatus;
			}

			runStatus.on('stateChange', record => {
				if (record.testFile && !timedOutWorkerFiles.has(record.testFile)) {
					// Restart the timer whenever there is activity from workers that
					// haven't already timed out.
					restartTimer();
				}

				if (failFast && (record.type === 'hook-failed' || record.type === 'test-failed' || record.type === 'worker-failed')) {
					// Prevent new test files from running once a test has failed.
					bailed = true;

					// Try to stop currently scheduled tests.
					for (const worker of pendingWorkers) {
						worker.notifyOfPeerFailure();
					}
				}
			});

			// Let each provider (e.g. a compiler) prepare shared state that is
			// passed on to every worker; providers returning null contribute nothing.
			const {providers = []} = this.options;
			const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
				const state = await main.compile({cacheDir, files: testFiles});
				return state === null ? null : {type, state};
			}))).filter(state => state !== null);

			// Resolve the correct concurrency value.
			let concurrency = Math.min(os.cpus().length, isCi ? 2 : Infinity);
			if (apiOptions.concurrency > 0) {
				concurrency = apiOptions.concurrency;
			}

			if (apiOptions.serial) {
				concurrency = 1;
			}

			// Try and run each file, limited by `concurrency`.
			await pMap(selectedFiles, async file => {
				// No new files should be run once a test has timed out or failed,
				// and failFast is enabled.
				if (bailed) {
					return;
				}

				const lineNumbers = getApplicableLineNumbers(globs.normalizeFileForMatching(apiOptions.projectDir, file), filter);
				const options = {
					...apiOptions,
					providerStates,
					lineNumbers,
					recordNewSnapshots: !isCi,
					// If we're looking for matches, run every single test process in exclusive-only mode
					runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true
				};

				if (runtimeOptions.updateSnapshots) {
					// Don't use in Object.assign() since it'll override options.updateSnapshots even when false.
					options.updateSnapshots = true;
				}

				const worker = fork(file, options, apiOptions.nodeArguments);
				runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});

				pendingWorkers.add(worker);
				worker.promise.then(() => {
					pendingWorkers.delete(worker);
				});
				restartTimer();

				return worker.promise;
			}, {concurrency, stopOnError: false});
		} catch (error) {
			// `stopOnError: false` makes pMap aggregate worker failures; unwrap
			// them so each is reported individually.
			if (error && error.name === 'AggregateError') {
				for (const err of error) {
					runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, err)});
				}
			} else {
				runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error)});
			}
		}

		restartTimer.cancel();
		return runStatus;
	}

	/**
	 * Lazily creates (and memoizes) the cache directory handed to providers.
	 * When caching is disabled a throwaway temp directory is used instead of
	 * `node_modules/.cache/ava`.
	 *
	 * @returns {string} Absolute path of the cache directory.
	 */
	_createCacheDir() {
		if (this._cacheDir) {
			return this._cacheDir;
		}

		const cacheDir = this.options.cacheEnabled === false ?
			fs.mkdtempSync(`${tempDir}${path.sep}`) :
			path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');

		// Ensure cacheDir exists
		fs.mkdirSync(cacheDir, {recursive: true});

		this._cacheDir = cacheDir;

		return cacheDir;
	}
}

module.exports = Api;
|
||||
880
node_modules/ava/lib/assert.js
generated
vendored
Normal file
880
node_modules/ava/lib/assert.js
generated
vendored
Normal file
|
|
@ -0,0 +1,880 @@
|
|||
'use strict';
|
||||
const concordance = require('concordance');
|
||||
const isError = require('is-error');
|
||||
const isPromise = require('is-promise');
|
||||
const concordanceOptions = require('./concordance-options').default;
|
||||
const concordanceDiffOptions = require('./concordance-options').diff;
|
||||
const snapshotManager = require('./snapshot-manager');
|
||||
|
||||
// Produce a labelled diff between two concordance descriptors. Caller options
// are merged with (and overridden by) the shared diff options.
function formatDescriptorDiff(actualDescriptor, expectedDescriptor, options) {
	const diffOptions = {...options, ...concordanceDiffOptions};
	const formatted = concordance.diffDescriptors(actualDescriptor, expectedDescriptor, diffOptions);
	return {label: 'Difference:', formatted};
}
|
||||
|
||||
// Pair a human-readable label with the formatted rendering of a concordance
// descriptor.
function formatDescriptorWithLabel(label, descriptor) {
	const formatted = concordance.formatDescriptor(descriptor, concordanceOptions);
	return {label, formatted};
}
|
||||
|
||||
// Describe an arbitrary value with concordance and attach a label to the
// formatted result.
function formatWithLabel(label, value) {
	const descriptor = concordance.describe(value, concordanceOptions);
	return formatDescriptorWithLabel(label, descriptor);
}
|
||||
|
||||
// Format a captured value for display inside power-assert output.
function formatPowerAssertValue(value) {
	const formatted = concordance.format(value, concordanceOptions);
	return formatted;
}
|
||||
|
||||
// Prototype-safe own-property check: goes through Object.prototype so it
// works even when `object` shadows `hasOwnProperty`.
const hasOwnProperty = (object, prop) => Reflect.apply(Object.prototype.hasOwnProperty, object, [prop]);

// Callback that intentionally does nothing (used to swallow handled rejections).
const noop = () => undefined;

// Default for the assertion callbacks that the test runner must inject.
const notImplemented = () => {
	throw new Error('not implemented');
};
|
||||
|
||||
// Error subclass carrying the structured metadata AVA's reporters need to
// render a failed assertion: the assertion name, formatted values, operator,
// and a saved error whose stack points at the test code.
class AssertionError extends Error {
	constructor(options) {
		const {message, assertion, fixedSource, improperUsage, actualStack, operator, values, raw, savedError} = options;
		super(message || '');
		this.name = 'AssertionError';

		this.assertion = assertion;
		this.fixedSource = fixedSource;
		this.improperUsage = improperUsage || false;
		this.actualStack = actualStack;
		this.operator = operator;
		this.values = values || [];

		// Raw expected and actual objects are stored for custom reporters
		// (such as wallaby.js), that manage worker processes directly and
		// use the values for custom diff views
		this.raw = raw;

		// Reserved for power-assert statements
		this.statements = [];

		// Keep a caller-captured error when provided; otherwise capture a
		// long stack trace at construction time.
		this.savedError = savedError ? savedError : getErrorWithLongStackTrace();
	}
}
|
||||
exports.AssertionError = AssertionError;
|
||||
|
||||
// Capture an Error whose stack frames are recorded without V8's default
// frame limit, restoring `Error.stackTraceLimit` before returning.
function getErrorWithLongStackTrace() {
	const previousLimit = Error.stackTraceLimit;
	Error.stackTraceLimit = Infinity;
	try {
		return new Error();
	} finally {
		Error.stackTraceLimit = previousLimit;
	}
}
|
||||
|
||||
/**
 * Normalizes and validates the `expectations` argument of `t.throws()` /
 * `t.throwsAsync()`.
 *
 * Returns `{}` when no expectations were supplied (only one argument, or an
 * explicit `null`/`undefined`). Otherwise verifies that `expectations` is a
 * non-empty plain object whose recognized properties (`instanceOf`, `is`,
 * `message`, `name`, `code`) have the right types, and throws an
 * AssertionError naming the offending `t.<assertion>()` call when it is not.
 *
 * @param {string} assertion - Assertion name, used in error messages.
 * @param {*} expectations - The caller-supplied expectations value.
 * @param {number} numberArgs - Number of arguments the assertion received.
 * @returns {Object} The validated (possibly defaulted) expectations object.
 * @throws {AssertionError} When the expectations are malformed.
 */
function validateExpectations(assertion, expectations, numberArgs) { // eslint-disable-line complexity
	if (numberArgs === 1 || expectations === null || expectations === undefined) {
		expectations = {};
	} else if (
		// Reject anything that is not a non-empty plain object. The function /
		// string / RegExp checks exist so those common mistakes are reported
		// with this message rather than slipping through as "objects".
		typeof expectations === 'function' ||
		typeof expectations === 'string' ||
		expectations instanceof RegExp ||
		typeof expectations !== 'object' ||
		Array.isArray(expectations) ||
		Object.keys(expectations).length === 0
	) {
		throw new AssertionError({
			assertion,
			message: `The second argument to \`t.${assertion}()\` must be an expectation object, \`null\` or \`undefined\``,
			values: [formatWithLabel('Called with:', expectations)]
		});
	} else {
		if (hasOwnProperty(expectations, 'instanceOf') && typeof expectations.instanceOf !== 'function') {
			throw new AssertionError({
				assertion,
				message: `The \`instanceOf\` property of the second argument to \`t.${assertion}()\` must be a function`,
				values: [formatWithLabel('Called with:', expectations)]
			});
		}

		if (hasOwnProperty(expectations, 'message') && typeof expectations.message !== 'string' && !(expectations.message instanceof RegExp)) {
			throw new AssertionError({
				assertion,
				message: `The \`message\` property of the second argument to \`t.${assertion}()\` must be a string or regular expression`,
				values: [formatWithLabel('Called with:', expectations)]
			});
		}

		if (hasOwnProperty(expectations, 'name') && typeof expectations.name !== 'string') {
			throw new AssertionError({
				assertion,
				message: `The \`name\` property of the second argument to \`t.${assertion}()\` must be a string`,
				values: [formatWithLabel('Called with:', expectations)]
			});
		}

		if (hasOwnProperty(expectations, 'code') && typeof expectations.code !== 'string' && typeof expectations.code !== 'number') {
			throw new AssertionError({
				assertion,
				message: `The \`code\` property of the second argument to \`t.${assertion}()\` must be a string or number`,
				values: [formatWithLabel('Called with:', expectations)]
			});
		}

		// Finally reject any property outside the recognized set.
		for (const key of Object.keys(expectations)) {
			switch (key) {
				case 'instanceOf':
				case 'is':
				case 'message':
				case 'name':
				case 'code':
					continue;
				default:
					throw new AssertionError({
						assertion,
						message: `The second argument to \`t.${assertion}()\` contains unexpected properties`,
						values: [formatWithLabel('Called with:', expectations)]
					});
			}
		}
	}

	return expectations;
}
|
||||
|
||||
// Note: this function *must* throw exceptions, since it can be used
// as part of a pending assertion for promises.
//
// Checks a caught/rejected value against validated expectations
// (`is`, `instanceOf`, `name`, `message` as string or RegExp, `code`),
// throwing an AssertionError describing the first mismatch. Returns
// undefined when every expectation holds. `prefix` words the error
// messages (e.g. "Function threw", "Promise rejected with"); `savedError`
// carries the stack captured before entering a promise chain.
function assertExpectations({assertion, actual, expectations, message, prefix, savedError}) {
	if (!isError(actual)) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			values: [formatWithLabel(`${prefix} exception that is not an error:`, actual)]
		});
	}

	// Capture the stack once; it is attached to every mismatch error below.
	const actualStack = actual.stack;

	if (hasOwnProperty(expectations, 'is') && actual !== expectations.is) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected to be strictly equal to:', expectations.is)
			]
		});
	}

	if (expectations.instanceOf && !(actual instanceof expectations.instanceOf)) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected instance of:', expectations.instanceOf)
			]
		});
	}

	if (typeof expectations.name === 'string' && actual.name !== expectations.name) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected name to equal:', expectations.name)
			]
		});
	}

	if (typeof expectations.message === 'string' && actual.message !== expectations.message) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected message to equal:', expectations.message)
			]
		});
	}

	if (expectations.message instanceof RegExp && !expectations.message.test(actual.message)) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected message to match:', expectations.message)
			]
		});
	}

	if (typeof expectations.code !== 'undefined' && actual.code !== expectations.code) {
		throw new AssertionError({
			assertion,
			message,
			savedError,
			actualStack,
			values: [
				formatWithLabel(`${prefix} unexpected exception:`, actual),
				formatWithLabel('Expected code to equal:', expectations.code)
			]
		});
	}
}
|
||||
|
||||
class Assertions {
|
||||
constructor({
|
||||
pass = notImplemented,
|
||||
pending = notImplemented,
|
||||
fail = notImplemented,
|
||||
skip = notImplemented,
|
||||
compareWithSnapshot = notImplemented,
|
||||
powerAssert
|
||||
} = {}) {
|
||||
const withSkip = assertionFn => {
|
||||
assertionFn.skip = skip;
|
||||
return assertionFn;
|
||||
};
|
||||
|
||||
// When adding new enhanced functions with new patterns, don't forget to
|
||||
// enable the pattern in the power-assert compilation step in @ava/babel.
|
||||
const withPowerAssert = (pattern, assertionFn) => powerAssert.empower(assertionFn, {
|
||||
onError: event => {
|
||||
if (event.powerAssertContext) {
|
||||
event.error.statements = powerAssert.format(event.powerAssertContext, formatPowerAssertValue);
|
||||
}
|
||||
|
||||
fail(event.error);
|
||||
},
|
||||
onSuccess: () => {
|
||||
pass();
|
||||
},
|
||||
bindReceiver: false,
|
||||
patterns: [pattern]
|
||||
});
|
||||
|
||||
const checkMessage = (assertion, message, powerAssert = false) => {
|
||||
if (typeof message === 'undefined' || typeof message === 'string') {
|
||||
return true;
|
||||
}
|
||||
|
||||
const error = new AssertionError({
|
||||
assertion,
|
||||
improperUsage: true,
|
||||
message: 'The assertion message must be a string',
|
||||
values: [formatWithLabel('Called with:', message)]
|
||||
});
|
||||
|
||||
if (powerAssert) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
fail(error);
|
||||
return false;
|
||||
};
|
||||
|
||||
this.pass = withSkip(() => {
|
||||
pass();
|
||||
});
|
||||
|
||||
this.fail = withSkip(message => {
|
||||
if (!checkMessage('fail', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
fail(new AssertionError({
|
||||
assertion: 'fail',
|
||||
message: message || 'Test failed via `t.fail()`'
|
||||
}));
|
||||
});
|
||||
|
||||
this.is = withSkip((actual, expected, message) => {
|
||||
if (!checkMessage('is', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Object.is(actual, expected)) {
|
||||
pass();
|
||||
} else {
|
||||
const result = concordance.compare(actual, expected, concordanceOptions);
|
||||
const actualDescriptor = result.actual || concordance.describe(actual, concordanceOptions);
|
||||
const expectedDescriptor = result.expected || concordance.describe(expected, concordanceOptions);
|
||||
|
||||
if (result.pass) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'is',
|
||||
message,
|
||||
raw: {actual, expected},
|
||||
values: [formatDescriptorWithLabel('Values are deeply equal to each other, but they are not the same:', actualDescriptor)]
|
||||
}));
|
||||
} else {
|
||||
fail(new AssertionError({
|
||||
assertion: 'is',
|
||||
message,
|
||||
raw: {actual, expected},
|
||||
values: [formatDescriptorDiff(actualDescriptor, expectedDescriptor)]
|
||||
}));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this.not = withSkip((actual, expected, message) => {
|
||||
if (!checkMessage('not', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Object.is(actual, expected)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'not',
|
||||
message,
|
||||
raw: {actual, expected},
|
||||
values: [formatWithLabel('Value is the same as:', actual)]
|
||||
}));
|
||||
} else {
|
||||
pass();
|
||||
}
|
||||
});
|
||||
|
||||
this.deepEqual = withSkip((actual, expected, message) => {
|
||||
if (!checkMessage('deepEqual', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const result = concordance.compare(actual, expected, concordanceOptions);
|
||||
if (result.pass) {
|
||||
pass();
|
||||
} else {
|
||||
const actualDescriptor = result.actual || concordance.describe(actual, concordanceOptions);
|
||||
const expectedDescriptor = result.expected || concordance.describe(expected, concordanceOptions);
|
||||
fail(new AssertionError({
|
||||
assertion: 'deepEqual',
|
||||
message,
|
||||
raw: {actual, expected},
|
||||
values: [formatDescriptorDiff(actualDescriptor, expectedDescriptor)]
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
this.notDeepEqual = withSkip((actual, expected, message) => {
|
||||
if (!checkMessage('notDeepEqual', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const result = concordance.compare(actual, expected, concordanceOptions);
|
||||
if (result.pass) {
|
||||
const actualDescriptor = result.actual || concordance.describe(actual, concordanceOptions);
|
||||
fail(new AssertionError({
|
||||
assertion: 'notDeepEqual',
|
||||
message,
|
||||
raw: {actual, expected},
|
||||
values: [formatDescriptorWithLabel('Value is deeply equal:', actualDescriptor)]
|
||||
}));
|
||||
} else {
|
||||
pass();
|
||||
}
|
||||
});
|
||||
|
||||
this.throws = withSkip((...args) => {
|
||||
// Since arrow functions do not support 'arguments', we are using rest
|
||||
// operator, so we can determine the total number of arguments passed
|
||||
// to the function.
|
||||
let [fn, expectations, message] = args;
|
||||
|
||||
if (!checkMessage('throws', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof fn !== 'function') {
|
||||
fail(new AssertionError({
|
||||
assertion: 'throws',
|
||||
improperUsage: true,
|
||||
message: '`t.throws()` must be called with a function',
|
||||
values: [formatWithLabel('Called with:', fn)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throws', expectations, args.length);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return;
|
||||
}
|
||||
|
||||
let retval;
|
||||
let actual = null;
|
||||
try {
|
||||
retval = fn();
|
||||
if (isPromise(retval)) {
|
||||
// Here isPromise() checks if something is "promise like". Cast to an actual promise.
|
||||
Promise.resolve(retval).catch(noop);
|
||||
fail(new AssertionError({
|
||||
assertion: 'throws',
|
||||
message,
|
||||
values: [formatWithLabel('Function returned a promise. Use `t.throwsAsync()` instead:', retval)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
actual = error;
|
||||
}
|
||||
|
||||
if (!actual) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'throws',
|
||||
message,
|
||||
values: [formatWithLabel('Function returned:', retval)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
assertExpectations({
|
||||
assertion: 'throws',
|
||||
actual,
|
||||
expectations,
|
||||
message,
|
||||
prefix: 'Function threw'
|
||||
});
|
||||
pass();
|
||||
return actual;
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
}
|
||||
});
|
||||
|
||||
this.throwsAsync = withSkip((...args) => {
|
||||
let [thrower, expectations, message] = args;
|
||||
|
||||
if (!checkMessage('throwsAsync', message)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
if (typeof thrower !== 'function' && !isPromise(thrower)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'throwsAsync',
|
||||
improperUsage: true,
|
||||
message: '`t.throwsAsync()` must be called with a function or promise',
|
||||
values: [formatWithLabel('Called with:', thrower)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throwsAsync', expectations, args.length);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
const handlePromise = (promise, wasReturned) => {
|
||||
// Create an error object to record the stack before it gets lost in the promise chain.
|
||||
const savedError = getErrorWithLongStackTrace();
|
||||
// Handle "promise like" objects by casting to a real Promise.
|
||||
const intermediate = Promise.resolve(promise).then(value => { // eslint-disable-line promise/prefer-await-to-then
|
||||
throw new AssertionError({
|
||||
assertion: 'throwsAsync',
|
||||
message,
|
||||
savedError,
|
||||
values: [formatWithLabel(`${wasReturned ? 'Returned promise' : 'Promise'} resolved with:`, value)]
|
||||
});
|
||||
}, error => {
|
||||
assertExpectations({
|
||||
assertion: 'throwsAsync',
|
||||
actual: error,
|
||||
expectations,
|
||||
message,
|
||||
prefix: `${wasReturned ? 'Returned promise' : 'Promise'} rejected with`,
|
||||
savedError
|
||||
});
|
||||
return error;
|
||||
});
|
||||
|
||||
pending(intermediate);
|
||||
// Don't reject the returned promise, even if the assertion fails.
|
||||
return intermediate.catch(noop);
|
||||
};
|
||||
|
||||
if (isPromise(thrower)) {
|
||||
return handlePromise(thrower, false);
|
||||
}
|
||||
|
||||
let retval;
|
||||
let actual = null;
|
||||
try {
|
||||
retval = thrower();
|
||||
} catch (error) {
|
||||
actual = error;
|
||||
}
|
||||
|
||||
if (actual) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'throwsAsync',
|
||||
message,
|
||||
actualStack: actual.stack,
|
||||
values: [formatWithLabel('Function threw synchronously. Use `t.throws()` instead:', actual)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
if (isPromise(retval)) {
|
||||
return handlePromise(retval, true);
|
||||
}
|
||||
|
||||
fail(new AssertionError({
|
||||
assertion: 'throwsAsync',
|
||||
message,
|
||||
values: [formatWithLabel('Function returned:', retval)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
});
|
||||
|
||||
this.notThrows = withSkip((fn, message) => {
|
||||
if (!checkMessage('notThrows', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof fn !== 'function') {
|
||||
fail(new AssertionError({
|
||||
assertion: 'notThrows',
|
||||
improperUsage: true,
|
||||
message: '`t.notThrows()` must be called with a function',
|
||||
values: [formatWithLabel('Called with:', fn)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
fn();
|
||||
} catch (error) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'notThrows',
|
||||
message,
|
||||
actualStack: error.stack,
|
||||
values: [formatWithLabel('Function threw:', error)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
pass();
|
||||
});
|
||||
|
||||
this.notThrowsAsync = withSkip((nonThrower, message) => {
|
||||
if (!checkMessage('notThrowsAsync', message)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
if (typeof nonThrower !== 'function' && !isPromise(nonThrower)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'notThrowsAsync',
|
||||
improperUsage: true,
|
||||
message: '`t.notThrowsAsync()` must be called with a function or promise',
|
||||
values: [formatWithLabel('Called with:', nonThrower)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
const handlePromise = (promise, wasReturned) => {
|
||||
// Create an error object to record the stack before it gets lost in the promise chain.
|
||||
const savedError = getErrorWithLongStackTrace();
|
||||
// Handle "promise like" objects by casting to a real Promise.
|
||||
const intermediate = Promise.resolve(promise).then(noop, error => { // eslint-disable-line promise/prefer-await-to-then
|
||||
throw new AssertionError({
|
||||
assertion: 'notThrowsAsync',
|
||||
message,
|
||||
savedError,
|
||||
values: [formatWithLabel(`${wasReturned ? 'Returned promise' : 'Promise'} rejected with:`, error)]
|
||||
});
|
||||
});
|
||||
pending(intermediate);
|
||||
// Don't reject the returned promise, even if the assertion fails.
|
||||
return intermediate.catch(noop);
|
||||
};
|
||||
|
||||
if (isPromise(nonThrower)) {
|
||||
return handlePromise(nonThrower, false);
|
||||
}
|
||||
|
||||
let retval;
|
||||
try {
|
||||
retval = nonThrower();
|
||||
} catch (error) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'notThrowsAsync',
|
||||
message,
|
||||
actualStack: error.stack,
|
||||
values: [formatWithLabel('Function threw:', error)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
if (!isPromise(retval)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'notThrowsAsync',
|
||||
message,
|
||||
values: [formatWithLabel('Function did not return a promise. Use `t.notThrows()` instead:', retval)]
|
||||
}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return handlePromise(retval, true);
|
||||
});
|
||||
|
||||
// `t.snapshot(expected, [options], [message])` — compare `expected` against
// the stored snapshot, recording a new one when the snapshot manager allows.
this.snapshot = withSkip((expected, ...rest) => {
  let message;
  let snapshotOptions;
  if (rest.length > 1) {
    [snapshotOptions, message] = rest;
  } else {
    // A single extra argument is either the options object or the assertion
    // message — disambiguate by type.
    const [optionsOrMessage] = rest;
    if (typeof optionsOrMessage === 'object') {
      snapshotOptions = optionsOrMessage;
    } else {
      message = optionsOrMessage;
    }
  }

  if (!checkMessage('snapshot', message)) {
    return;
  }

  let result;
  try {
    result = compareWithSnapshot({
      expected,
      id: snapshotOptions ? snapshotOptions.id : undefined,
      message
    });
  } catch (error) {
    // Snapshot infrastructure problems (e.g. an unreadable or
    // version-mismatched .snap file) become assertion failures; any other
    // error is rethrown.
    if (!(error instanceof snapshotManager.SnapshotError)) {
      throw error;
    }

    const improperUsage = {name: error.name, snapPath: error.snapPath};
    if (error instanceof snapshotManager.VersionMismatchError) {
      improperUsage.snapVersion = error.snapVersion;
      improperUsage.expectedVersion = error.expectedVersion;
    }

    fail(new AssertionError({
      assertion: 'snapshot',
      message: message || 'Could not compare snapshot',
      improperUsage
    }));
    return;
  }

  if (result.pass) {
    pass();
  } else if (result.actual) {
    fail(new AssertionError({
      assertion: 'snapshot',
      message: message || 'Did not match snapshot',
      values: [formatDescriptorDiff(result.actual, result.expected, {invert: true})]
    }));
  } else {
    // This can only occur in CI environments.
    fail(new AssertionError({
      assertion: 'snapshot',
      message: message || 'No snapshot available — new snapshots are not created in CI environments'
    }));
  }
});
|
||||
|
||||
// `t.truthy(actual, [message])` — passes when `actual` is truthy.
this.truthy = withSkip((actual, message) => {
  if (!checkMessage('truthy', message)) {
    return;
  }

  // Guard clause: report the failure and bail, otherwise fall through.
  if (!actual) {
    fail(new AssertionError({
      assertion: 'truthy',
      message,
      operator: '!!',
      values: [formatWithLabel('Value is not truthy:', actual)]
    }));
    return;
  }

  pass();
});
|
||||
|
||||
// `t.falsy(actual, [message])` — passes when `actual` is falsy.
this.falsy = withSkip((actual, message) => {
  if (!checkMessage('falsy', message)) {
    return;
  }

  // Guard clause: a falsy value passes immediately.
  if (!actual) {
    pass();
    return;
  }

  fail(new AssertionError({
    assertion: 'falsy',
    message,
    operator: '!',
    values: [formatWithLabel('Value is not falsy:', actual)]
  }));
});
|
||||
|
||||
// `t.true(actual, [message])` — passes only when `actual` is exactly `true`.
this.true = withSkip((actual, message) => {
  if (!checkMessage('true', message)) {
    return;
  }

  // Strict identity check — truthy values other than `true` fail.
  if (actual !== true) {
    fail(new AssertionError({
      assertion: 'true',
      message,
      values: [formatWithLabel('Value is not `true`:', actual)]
    }));
    return;
  }

  pass();
});
|
||||
|
||||
// `t.false(actual, [message])` — passes only when `actual` is exactly `false`.
this.false = withSkip((actual, message) => {
  if (!checkMessage('false', message)) {
    return;
  }

  // Strict identity check — falsy values other than `false` fail.
  if (actual !== false) {
    fail(new AssertionError({
      assertion: 'false',
      message,
      values: [formatWithLabel('Value is not `false`:', actual)]
    }));
    return;
  }

  pass();
});
|
||||
|
||||
// `t.regex(string, regex, [message])` — passes when `regex` matches `string`.
// Both arguments are type-checked first; wrong types are improper usage.
this.regex = withSkip((string, regex, message) => {
  if (!checkMessage('regex', message)) {
    return;
  }

  if (typeof string !== 'string') {
    fail(new AssertionError({
      assertion: 'regex',
      improperUsage: true,
      message: '`t.regex()` must be called with a string',
      values: [formatWithLabel('Called with:', string)]
    }));
    return;
  }

  if (!(regex instanceof RegExp)) {
    fail(new AssertionError({
      assertion: 'regex',
      improperUsage: true,
      message: '`t.regex()` must be called with a regular expression',
      values: [formatWithLabel('Called with:', regex)]
    }));
    return;
  }

  // Positive match passes; otherwise show both the value and the pattern.
  if (regex.test(string)) {
    pass();
    return;
  }

  fail(new AssertionError({
    assertion: 'regex',
    message,
    values: [
      formatWithLabel('Value must match expression:', string),
      formatWithLabel('Regular expression:', regex)
    ]
  }));
});
|
||||
|
||||
// `t.notRegex(string, regex, [message])` — passes when `regex` does NOT
// match `string`. Both arguments are type-checked first.
this.notRegex = withSkip((string, regex, message) => {
  if (!checkMessage('notRegex', message)) {
    return;
  }

  if (typeof string !== 'string') {
    fail(new AssertionError({
      assertion: 'notRegex',
      improperUsage: true,
      message: '`t.notRegex()` must be called with a string',
      values: [formatWithLabel('Called with:', string)]
    }));
    return;
  }

  if (!(regex instanceof RegExp)) {
    fail(new AssertionError({
      assertion: 'notRegex',
      improperUsage: true,
      message: '`t.notRegex()` must be called with a regular expression',
      values: [formatWithLabel('Called with:', regex)]
    }));
    return;
  }

  // Absence of a match passes; otherwise show the value and the pattern.
  if (!regex.test(string)) {
    pass();
    return;
  }

  fail(new AssertionError({
    assertion: 'notRegex',
    message,
    values: [
      formatWithLabel('Value must not match expression:', string),
      formatWithLabel('Regular expression:', regex)
    ]
  }));
});
|
||||
|
||||
// `t.assert(value, [message])`. When power-assert is available it wraps the
// implementation so failures show the decomposed expression; otherwise a
// plain truthiness check is installed.
if (powerAssert === undefined) {
  this.assert = withSkip((actual, message) => {
    if (!checkMessage('assert', message)) {
      return;
    }

    if (!actual) {
      fail(new AssertionError({
        assertion: 'assert',
        message,
        operator: '!!',
        values: [formatWithLabel('Value is not truthy:', actual)]
      }));
      return;
    }

    pass();
  });
} else {
  this.assert = withSkip(withPowerAssert(
    'assert(value, [message])',
    (actual, message) => {
      // NOTE(review): the third argument presumably makes checkMessage
      // throw instead of recording a failure, so power-assert can surface
      // it — confirm against checkMessage's definition.
      checkMessage('assert', message, true);

      if (!actual) {
        // Thrown (rather than `fail()`ed) so withPowerAssert can enrich
        // the error with the captured expression values.
        throw new AssertionError({
          assertion: 'assert',
          message,
          operator: '!!',
          values: [formatWithLabel('Value is not truthy:', actual)]
        });
      }
    })
  );
}
|
||||
}
|
||||
}
|
||||
exports.Assertions = Assertions;
|
||||
20
node_modules/ava/lib/chalk.js
generated
vendored
Normal file
20
node_modules/ava/lib/chalk.js
generated
vendored
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
'use strict';
|
||||
const chalk = require('chalk');
|
||||
|
||||
let ctx = null;
|
||||
exports.get = () => {
|
||||
if (!ctx) {
|
||||
throw new Error('Chalk has not yet been configured');
|
||||
}
|
||||
|
||||
return ctx;
|
||||
};
|
||||
|
||||
exports.set = options => {
|
||||
if (ctx) {
|
||||
throw new Error('Chalk has already been configured');
|
||||
}
|
||||
|
||||
ctx = new chalk.Instance(options);
|
||||
return ctx;
|
||||
};
|
||||
449
node_modules/ava/lib/cli.js
generated
vendored
Normal file
449
node_modules/ava/lib/cli.js
generated
vendored
Normal file
|
|
@ -0,0 +1,449 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const del = require('del');
|
||||
const updateNotifier = require('update-notifier');
|
||||
const figures = require('figures');
|
||||
const arrify = require('arrify');
|
||||
const yargs = require('yargs');
|
||||
const readPkg = require('read-pkg');
|
||||
const isCi = require('./is-ci');
|
||||
const loadConfig = require('./load-config');
|
||||
|
||||
// Print `message` as a fatal CLI error (red cross prefix) on stderr and
// terminate the process with a non-zero exit code.
function exit(message) {
  console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
  process.exit(1); // eslint-disable-line unicorn/no-process-exit
}
|
||||
|
||||
// Yargs collects repeated flags into an array; keep only the last
// occurrence. Reads the final element without mutating the caller's array
// (the previous implementation used `pop()`, which emptied yargs' state
// as a side effect). An empty array yields `undefined`, as before.
const coerceLastValue = value => {
  return Array.isArray(value) ? value[value.length - 1] : value;
};
|
||||
|
||||
// CLI flags shared by the default `ava` command and `ava debug`.
// `coerce: coerceLastValue` keeps only the last occurrence of a repeated
// flag; `match` deliberately omits it because it may be repeated.
const FLAGS = {
  concurrency: {
    alias: 'c',
    coerce: coerceLastValue,
    description: 'Max number of test files running at the same time (default: CPU cores)',
    type: 'number'
  },
  'fail-fast': {
    coerce: coerceLastValue,
    description: 'Stop after first test failure',
    type: 'boolean'
  },
  match: {
    alias: 'm',
    description: 'Only run tests with matching title (can be repeated)',
    type: 'string'
  },
  'node-arguments': {
    coerce: coerceLastValue,
    description: 'Additional Node.js arguments for launching worker processes (specify as a single string)',
    type: 'string'
  },
  serial: {
    alias: 's',
    coerce: coerceLastValue,
    description: 'Run tests serially',
    type: 'boolean'
  },
  tap: {
    alias: 't',
    coerce: coerceLastValue,
    description: 'Generate TAP output',
    type: 'boolean'
  },
  timeout: {
    alias: 'T',
    coerce: coerceLastValue,
    description: 'Set global timeout (milliseconds or human-readable, e.g. 10s, 2m)',
    type: 'string'
  },
  'update-snapshots': {
    alias: 'u',
    coerce: coerceLastValue,
    description: 'Update snapshots',
    type: 'boolean'
  },
  verbose: {
    alias: 'v',
    coerce: coerceLastValue,
    description: 'Enable verbose output',
    type: 'boolean'
  },
  watch: {
    alias: 'w',
    coerce: coerceLastValue,
    description: 'Re-run tests when files change',
    type: 'boolean'
  }
};
|
||||
|
||||
// Entry point for the `ava` CLI. Parses argv, loads and validates
// configuration, wires up the API, reporter and (optionally) the watcher,
// then runs the tests and sets the process exit code.
exports.run = async () => { // eslint-disable-line complexity
  // Load the config file first; report errors only after chalk is set up
  // so they can be colored.
  let conf = {};
  let confError = null;
  try {
    const {argv: {config: configFile}} = yargs.help(false);
    conf = loadConfig({configFile});
  } catch (error) {
    confError = error;
  }

  let debug = null;
  let resetCache = false;
  const {argv} = yargs
    .parserConfiguration({
      'boolean-negation': true,
      'camel-case-expansion': false,
      'combine-arrays': false,
      'dot-notation': false,
      'duplicate-arguments-array': true,
      'flatten-duplicate-arrays': true,
      'negation-prefix': 'no-',
      'parse-numbers': true,
      'populate--': true,
      'set-placeholder-key': false,
      'short-option-groups': true,
      'strip-aliased': true,
      'unknown-options-as-args': false
    })
    .usage('$0 [<pattern>...]')
    .usage('$0 debug [<pattern>...]')
    .usage('$0 reset-cache')
    .options({
      color: {
        description: 'Force color output',
        type: 'boolean'
      },
      config: {
        description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files'
      }
    })
    // Default command: run tests matching the given patterns.
    .command('* [<pattern>...]', 'Run tests', yargs => yargs.options(FLAGS).positional('pattern', {
      array: true,
      describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
      type: 'string'
    }))
    // `ava debug`: run a single file under the Node.js inspector.
    .command(
      'debug [<pattern>...]',
      'Activate Node.js inspector and run a single test file',
      yargs => yargs.options(FLAGS).options({
        break: {
          description: 'Break before the test file is loaded',
          type: 'boolean'
        },
        host: {
          default: '127.0.0.1',
          description: 'Address or hostname through which you can connect to the inspector',
          type: 'string'
        },
        port: {
          default: 9229,
          description: 'Port on which you can connect to the inspector',
          type: 'number'
        }
      }).positional('pattern', {
        demand: true,
        describe: 'Glob patterns to select a single test file to debug. Add a colon and specify line numbers of specific tests to run',
        type: 'string'
      }),
      argv => {
        debug = {
          break: argv.break === true,
          files: argv.pattern,
          host: argv.host,
          port: argv.port
        };
      })
    // `ava reset-cache`: wipe the compilation cache and exit.
    .command(
      'reset-cache',
      'Reset AVA’s compilation cache and exit',
      yargs => yargs,
      () => {
        resetCache = true;
      })
    .example('$0')
    .example('$0 test.js')
    .example('$0 test.js:4,7-9')
    .help();

  // Merge CLI flags over the file configuration (CLI wins). The two
  // kebab-case flags map to camelCase config keys; `node-arguments` is
  // handled separately below.
  const combined = {...conf};
  for (const flag of Object.keys(FLAGS)) {
    if (Reflect.has(argv, flag)) {
      if (flag === 'fail-fast') {
        combined.failFast = argv[flag];
      } else if (flag === 'update-snapshots') {
        combined.updateSnapshots = argv[flag];
      } else if (flag !== 'node-arguments') {
        combined[flag] = argv[flag];
      }
    }
  }

  const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
  const chalk = require('./chalk').set(chalkOptions);

  // Chalk is configured now — report any deferred configuration error.
  if (confError) {
    if (confError.parent) {
      exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
    } else {
      exit(confError.message);
    }
  }

  updateNotifier({pkg: require('../package.json')}).notify();

  const {nonSemVerExperiments: experiments, projectDir} = conf;
  if (resetCache) {
    const cacheDir = path.join(projectDir, 'node_modules', '.cache', 'ava');
    try {
      await del('*', {
        cwd: cacheDir,
        nodir: true
      });
      console.error(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
      process.exit(0); // eslint-disable-line unicorn/no-process-exit
    } catch (error) {
      exit(`Error removing AVA cache files in ${cacheDir}\n\n${chalk.gray((error && error.stack) || error)}`);
    }

    return;
  }

  // Watch mode is incompatible with TAP output, CI and the debugger.
  if (argv.watch) {
    if (argv.tap && !conf.tap) {
      exit('The TAP reporter is not available when using watch mode.');
    }

    if (isCi) {
      exit('Watch mode is not available in CI, as it prevents AVA from terminating.');
    }

    if (debug !== null) {
      exit('Watch mode is not available when debugging.');
    }
  }

  // Debug mode restrictions; the global timeout is disabled while debugging.
  if (debug !== null) {
    if (argv.tap && !conf.tap) {
      exit('The TAP reporter is not available when debugging.');
    }

    if (isCi) {
      exit('Debugging is not available in CI.');
    }

    if (combined.timeout) {
      console.log(chalk.magenta(` ${figures.warning} The timeout option has been disabled to help with debugging.`));
    }
  }

  if (Reflect.has(combined, 'concurrency') && (!Number.isInteger(combined.concurrency) || combined.concurrency < 0)) {
    exit('The --concurrency or -c flag must be provided with a nonnegative integer.');
  }

  if (!combined.tap && Object.keys(experiments).length > 0) {
    console.log(chalk.magenta(` ${figures.warning} Experiments are enabled. These are unsupported and may change or be removed at any time.`));
  }

  // Reject configuration options that were removed in this major version.
  if (Reflect.has(conf, 'compileEnhancements')) {
    exit('Enhancement compilation must be configured in AVA’s Babel options.');
  }

  if (Reflect.has(conf, 'helpers')) {
    exit('AVA no longer compiles helpers. Add exclusion patterns to the ’files’ configuration and specify ’compileAsTests’ in the Babel options instead.');
  }

  if (Reflect.has(conf, 'sources')) {
    exit('’sources’ has been removed. Use ’ignoredByWatcher’ to provide glob patterns of files that the watcher should ignore.');
  }

  // Heavier modules are loaded lazily, after the quick validations above.
  const ciParallelVars = require('ci-parallel-vars');
  const Api = require('./api');
  const VerboseReporter = require('./reporters/verbose');
  const MiniReporter = require('./reporters/mini');
  const TapReporter = require('./reporters/tap');
  const Watcher = require('./watcher');
  const normalizeExtensions = require('./extensions');
  const {normalizeGlobs, normalizePattern} = require('./globs');
  const normalizeNodeArguments = require('./node-arguments');
  const validateEnvironmentVariables = require('./environment-variables');
  const {splitPatternAndLineNumbers} = require('./line-numbers');
  const providerManager = require('./provider-manager');

  // A missing package.json is fine; anything else is a real error.
  let pkg;
  try {
    pkg = readPkg.sync({cwd: projectDir});
  } catch (error) {
    if (error.code !== 'ENOENT') {
      throw error;
    }
  }

  // `.js` files follow the package.json `type` field; `.cjs`/`.mjs` are fixed.
  const {type: defaultModuleType = 'commonjs'} = pkg || {};

  const moduleTypes = {
    cjs: 'commonjs',
    mjs: 'module',
    js: defaultModuleType
  };

  // Optional compilation providers (Babel / TypeScript), enabled by config.
  const providers = [];
  if (Reflect.has(conf, 'babel')) {
    try {
      const {level, main} = providerManager.babel(projectDir);
      providers.push({
        level,
        main: main({config: conf.babel}),
        type: 'babel'
      });
    } catch (error) {
      exit(error.message);
    }
  }

  if (Reflect.has(conf, 'typescript')) {
    try {
      const {level, main} = providerManager.typescript(projectDir);
      providers.push({
        level,
        main: main({config: conf.typescript}),
        type: 'typescript'
      });
    } catch (error) {
      exit(error.message);
    }
  }

  // Normalize and validate the remaining configuration; each failure is fatal.
  let environmentVariables;
  try {
    environmentVariables = validateEnvironmentVariables(conf.environmentVariables);
  } catch (error) {
    exit(error.message);
  }

  let extensions;
  try {
    extensions = normalizeExtensions(conf.extensions, providers);
  } catch (error) {
    exit(error.message);
  }

  let globs;
  try {
    globs = normalizeGlobs({files: conf.files, ignoredByWatcher: conf.ignoredByWatcher, extensions, providers});
  } catch (error) {
    exit(error.message);
  }

  let nodeArguments;
  try {
    nodeArguments = normalizeNodeArguments(conf.nodeArguments, argv['node-arguments']);
  } catch (error) {
    exit(error.message);
  }

  // Split the test-file set across parallel CI machines when detected.
  let parallelRuns = null;
  if (isCi && ciParallelVars) {
    const {index: currentIndex, total: totalRuns} = ciParallelVars;
    parallelRuns = {currentIndex, totalRuns};
  }

  const match = combined.match === '' ? [] : arrify(combined.match);

  // Resolve positional patterns (possibly carrying `:line` suffixes)
  // relative to the project directory.
  const input = debug ? debug.files : (argv.pattern || []);
  const filter = input
    .map(pattern => splitPatternAndLineNumbers(pattern))
    .map(({pattern, ...rest}) => ({
      pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
      ...rest
    }));

  const api = new Api({
    cacheEnabled: combined.cache !== false,
    chalkOptions,
    concurrency: combined.concurrency || 0,
    debug,
    environmentVariables,
    experiments,
    extensions,
    failFast: combined.failFast,
    failWithoutAssertions: combined.failWithoutAssertions !== false,
    globs,
    match,
    moduleTypes,
    nodeArguments,
    parallelRuns,
    projectDir,
    providers,
    ranFromCli: true,
    require: arrify(combined.require),
    serial: combined.serial,
    snapshotDir: combined.snapshotDir ? path.resolve(projectDir, combined.snapshotDir) : null,
    timeout: combined.timeout || '10s',
    updateSnapshots: combined.updateSnapshots,
    workerArgv: argv['--']
  });

  // Reporter selection: TAP when requested (but not in watch/debug mode);
  // verbose when debugging, verbose flag, CI or non-TTY; mini otherwise.
  let reporter;
  if (combined.tap && !combined.watch && debug === null) {
    reporter = new TapReporter({
      projectDir,
      reportStream: process.stdout,
      stdStream: process.stderr
    });
  } else if (debug !== null || combined.verbose || isCi || !process.stdout.isTTY) {
    reporter = new VerboseReporter({
      projectDir,
      reportStream: process.stdout,
      stdStream: process.stderr,
      watching: combined.watch
    });
  } else {
    reporter = new MiniReporter({
      projectDir,
      reportStream: process.stdout,
      stdStream: process.stderr,
      watching: combined.watch
    });
  }

  api.on('run', plan => {
    reporter.startRun(plan);

    plan.status.on('stateChange', evt => {
      if (evt.type === 'interrupt') {
        // Ctrl+C: flush the reporter, then hard-exit.
        reporter.endRun();
        process.exit(1); // eslint-disable-line unicorn/no-process-exit
      }
    });
  });

  if (combined.watch) {
    const watcher = new Watcher({
      api,
      filter,
      globs,
      projectDir,
      providers,
      reporter
    });
    watcher.observeStdin(process.stdin);
  } else {
    // Debugging requires exactly one selected test file.
    let debugWithoutSpecificFile = false;
    api.on('run', plan => {
      if (plan.debug && plan.files.length !== 1) {
        debugWithoutSpecificFile = true;
      }
    });

    const runStatus = await api.run({filter});

    if (debugWithoutSpecificFile) {
      exit('Provide the path to the test file you wish to debug');
      return;
    }

    process.exitCode = runStatus.suggestExitCode({matching: match.length > 0});
    reporter.endRun();
  }
};
|
||||
54
node_modules/ava/lib/code-excerpt.js
generated
vendored
Normal file
54
node_modules/ava/lib/code-excerpt.js
generated
vendored
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const equalLength = require('equal-length');
|
||||
const codeExcerpt = require('code-excerpt');
|
||||
const truncate = require('cli-truncate');
|
||||
const chalk = require('./chalk').get();
|
||||
|
||||
// Right-align `lineNumber` in a column wide enough for `maxLineNumber`,
// returning it as a string (no padding when it is already the widest).
const formatLineNumber = (lineNumber, maxLineNumber) =>
  String(lineNumber).padStart(String(maxLineNumber).length);
|
||||
|
||||
// Render a few source lines around `source.line`, highlighting the error
// line with a red background. Returns `null` when no excerpt can be shown
// (file outside the project, a dependency, unreadable, or nothing to show).
module.exports = (source, options = {}) => {
  if (!source.isWithinProject || source.isDependency) {
    return null;
  }

  const {file, line} = source;
  const maxWidth = options.maxWidth || 80;

  let contents;
  try {
    contents = fs.readFileSync(file, 'utf8');
  } catch (_) {
    // Best effort: an unreadable file simply yields no excerpt.
    return null;
  }

  const excerpt = codeExcerpt(contents, line, {around: 1});
  if (!excerpt) {
    return null;
  }

  // Truncate long lines so the gutter (line number + separator) still fits
  // within maxWidth.
  const lines = excerpt.map(item => ({
    line: item.line,
    value: truncate(item.value, maxWidth - String(line).length - 5)
  }));

  // Pad all lines to equal width so the background highlight forms a block.
  const joinedLines = lines.map(line => line.value).join('\n');
  const extendedLines = equalLength(joinedLines).split('\n');

  return lines
    .map((item, index) => ({
      line: item.line,
      value: extendedLines[index]
    }))
    .map(item => {
      const isErrorSource = item.line === line;

      // Dim the gutter for context lines; highlight the error line itself.
      const lineNumber = formatLineNumber(item.line, line) + ':';
      const coloredLineNumber = isErrorSource ? lineNumber : chalk.grey(lineNumber);
      const result = ` ${coloredLineNumber} ${item.value}`;

      return isErrorSource ? chalk.bgRed(result) : result;
    })
    .join('\n');
};
|
||||
139
node_modules/ava/lib/concordance-options.js
generated
vendored
Normal file
139
node_modules/ava/lib/concordance-options.js
generated
vendored
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
'use strict';
const util = require('util');
const ansiStyles = require('ansi-styles');
const stripAnsi = require('strip-ansi');
const cloneDeepWith = require('lodash/cloneDeepWith');
const reactPlugin = require('@concordance/react');
const chalk = require('./chalk').get();

// Wrap Concordance's React plugin. Change the name to avoid collisions if in
// the future users can register plugins themselves.
const avaReactPlugin = {...reactPlugin, name: 'ava-plugin-react'};
const plugins = [avaReactPlugin];

// Some theme entries are pre-rendered strings, so force at least basic color
// support when producing them; the plain theme strips the codes back out.
const forceColor = new chalk.Instance({level: Math.max(chalk.level, 1)});

// Concordance theme used when color output is enabled.
const colorTheme = {
  boolean: ansiStyles.yellow,
  circular: forceColor.grey('[Circular]'),
  date: {
    invalid: forceColor.red('invalid'),
    value: ansiStyles.blue
  },
  diffGutters: {
    actual: forceColor.red('-') + ' ',
    expected: forceColor.green('+') + ' ',
    padding: ' '
  },
  error: {
    ctor: {open: ansiStyles.grey.open + '(', close: ')' + ansiStyles.grey.close},
    name: ansiStyles.magenta
  },
  function: {
    name: ansiStyles.blue,
    stringTag: ansiStyles.magenta
  },
  global: ansiStyles.magenta,
  item: {after: forceColor.grey(',')},
  list: {openBracket: forceColor.grey('['), closeBracket: forceColor.grey(']')},
  mapEntry: {after: forceColor.grey(',')},
  maxDepth: forceColor.grey('…'),
  null: ansiStyles.yellow,
  number: ansiStyles.yellow,
  object: {
    openBracket: forceColor.grey('{'),
    closeBracket: forceColor.grey('}'),
    ctor: ansiStyles.magenta,
    stringTag: {open: ansiStyles.magenta.open + '@', close: ansiStyles.magenta.close},
    secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close}
  },
  property: {
    after: forceColor.grey(','),
    keyBracket: {open: forceColor.grey('['), close: forceColor.grey(']')},
    valueFallback: forceColor.grey('…')
  },
  // Styling for the React-element plugin output.
  react: {
    functionType: forceColor.grey('\u235F'),
    openTag: {
      start: forceColor.grey('<'),
      end: forceColor.grey('>'),
      selfClose: forceColor.grey('/'),
      selfCloseVoid: ' ' + forceColor.grey('/')
    },
    closeTag: {
      open: forceColor.grey('</'),
      close: forceColor.grey('>')
    },
    tagName: ansiStyles.magenta,
    attribute: {
      separator: '=',
      value: {
        openBracket: forceColor.grey('{'),
        closeBracket: forceColor.grey('}'),
        string: {
          line: {open: forceColor.blue('"'), close: forceColor.blue('"'), escapeQuote: '"'}
        }
      }
    },
    child: {
      openBracket: forceColor.grey('{'),
      closeBracket: forceColor.grey('}')
    }
  },
  regexp: {
    source: {open: ansiStyles.blue.open + '/', close: '/' + ansiStyles.blue.close},
    flags: ansiStyles.yellow
  },
  stats: {separator: forceColor.grey('---')},
  string: {
    open: ansiStyles.blue.open,
    close: ansiStyles.blue.close,
    line: {open: forceColor.blue('\''), close: forceColor.blue('\'')},
    multiline: {start: forceColor.blue('`'), end: forceColor.blue('`')},
    controlPicture: ansiStyles.grey,
    diff: {
      insert: {
        open: ansiStyles.bgGreen.open + ansiStyles.black.open,
        close: ansiStyles.black.close + ansiStyles.bgGreen.close
      },
      delete: {
        open: ansiStyles.bgRed.open + ansiStyles.black.open,
        close: ansiStyles.black.close + ansiStyles.bgRed.close
      },
      equal: ansiStyles.blue,
      insertLine: {
        open: ansiStyles.green.open,
        close: ansiStyles.green.close
      },
      deleteLine: {
        open: ansiStyles.red.open,
        close: ansiStyles.red.close
      }
    }
  },
  symbol: ansiStyles.yellow,
  typedArray: {
    bytes: ansiStyles.yellow
  },
  undefined: ansiStyles.yellow
};

// The plain theme is the color theme with every ANSI escape stripped out.
const plainTheme = cloneDeepWith(colorTheme, value => {
  if (typeof value === 'string') {
    return stripAnsi(value);
  }
});

const theme = chalk.level > 0 ? colorTheme : plainTheme;

exports.default = {
  // Use Node's object inspection depth, clamped to a minimum of 3
  get maxDepth() {
    return Math.max(3, util.inspect.defaultOptions.depth);
  },
  plugins,
  theme
};

// Diffs are kept shallow; snapshots are serialized without color.
exports.diff = {maxDepth: 1, plugins, theme};
exports.snapshotManager = {plugins, theme: plainTheme};
|
||||
42
node_modules/ava/lib/context-ref.js
generated
vendored
Normal file
42
node_modules/ava/lib/context-ref.js
generated
vendored
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
'use strict';
|
||||
const clone = require('lodash/clone');
|
||||
|
||||
// Shared reference to a test's `t.context` value. Hooks and tests receive
// (copies of) the same ref so context set in `before`/`beforeEach` flows
// into the tests.
class ContextRef {
  constructor() {
    this.value = {};
  }

  // Current context value.
  get() {
    return this.value;
  }

  // Replace the context value.
  set(newValue) {
    this.value = newValue;
  }

  // A lazily-bound copy: it snapshots this ref's value only when first
  // read or written (see LateBinding below).
  copy() {
    return new LateBinding(this);
  }
}
module.exports = ContextRef;
|
||||
|
||||
// Copy-on-first-use view of another ContextRef: the first read snapshots
// (shallow-clones) the source ref's value; a write binds it immediately
// and detaches it from the source.
class LateBinding extends ContextRef {
  constructor(ref) {
    super();
    this.ref = ref;
    this.bound = false;
  }

  get() {
    if (this.bound) {
      return super.get();
    }

    // First access: bind to a shallow clone of the source value so later
    // mutations don't leak back into the source ref.
    this.set(clone(this.ref.get()));
    return super.get();
  }

  set(newValue) {
    this.bound = true;
    super.set(newValue);
  }
}
|
||||
116
node_modules/ava/lib/create-chain.js
generated
vendored
Normal file
116
node_modules/ava/lib/create-chain.js
generated
vendored
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
'use strict';
// Registry mapping every chain function to its metadata — either the root's
// `call`/`defaults`, or a link's `flag` and parent — so invoking any link
// can walk back to the root.
const chainRegistry = new WeakMap();

// Create the root of a declaration chain (e.g. `test`, `test.after`).
// Invoking the returned function declares with a fresh copy of `defaults`.
function startChain(name, call, defaults) {
  const fn = (...args) => {
    call({...defaults}, args);
  };

  // Expose the full chain name for stack traces and debugging.
  Object.defineProperty(fn, 'name', {value: name});
  chainRegistry.set(fn, {call, defaults, fullName: name});
  return fn;
}
|
||||
|
||||
// Attach a modifier link (e.g. `.skip`, `.only`) onto `previous`. `flag` is
// the internal flag name set when the chain is invoked; defaults to `name`.
function extendChain(previous, name, flag) {
  if (!flag) {
    flag = name;
  }

  const fn = (...args) => {
    callWithFlag(previous, flag, args);
  };

  // e.g. `test.serial.skip` — built from the parent's registered full name.
  const fullName = `${chainRegistry.get(previous).fullName}.${name}`;
  Object.defineProperty(fn, 'name', {value: fullName});
  previous[name] = fn;

  chainRegistry.set(fn, {flag, fullName, prev: previous});
  return fn;
}
|
||||
|
||||
// Walk from an invoked link back to the chain root, accumulating the flag
// of every link along the way, then invoke the root's `call` with its
// defaults merged under the accumulated flags.
function callWithFlag(previous, flag, args) {
  const combinedFlags = {[flag]: true};
  do {
    const step = chainRegistry.get(previous);
    if (step.call) {
      // Reached the root.
      step.call({...step.defaults, ...combinedFlags}, args);
      previous = null;
    } else {
      combinedFlags[step.flag] = true;
      previous = step.prev;
    }
  } while (previous);
}
|
||||
|
||||
// Wire up the modifier links allowed on a hook chain (`before`, `after`,
// `beforeEach`, `afterEach`). `always` exists only on "after" hooks.
function createHookChain(hook, isAfterHook) {
  // Hook chaining rules:
  // * `always` comes immediately after "after hooks"
  // * `skip` must come at the end
  // * no `only`
  // * no repeating
  extendChain(hook, 'cb', 'callback');
  extendChain(hook, 'skip', 'skipped');
  extendChain(hook.cb, 'skip', 'skipped');
  if (isAfterHook) {
    extendChain(hook, 'always');
    extendChain(hook.always, 'cb', 'callback');
    extendChain(hook.always, 'skip', 'skipped');
    extendChain(hook.always.cb, 'skip', 'skipped');
  }

  return hook;
}
|
||||
|
||||
// Build the full `test` chain: the root plus every legal combination of
// modifiers, the four hook chains (also under `.serial`), and `.todo`.
// `fn` receives (metadata, args) for every declaration made via the chain.
function createChain(fn, defaults, meta) {
  // Test chaining rules:
  // * `serial` must come at the start
  // * `only` and `skip` must come at the end
  // * `failing` must come at the end, but can be followed by `only` and `skip`
  // * `only` and `skip` cannot be chained together
  // * no repeating
  const root = startChain('test', fn, {...defaults, type: 'test'});
  extendChain(root, 'cb', 'callback');
  extendChain(root, 'failing');
  extendChain(root, 'only', 'exclusive');
  extendChain(root, 'serial');
  extendChain(root, 'skip', 'skipped');
  extendChain(root.cb, 'failing');
  extendChain(root.cb, 'only', 'exclusive');
  extendChain(root.cb, 'skip', 'skipped');
  extendChain(root.cb.failing, 'only', 'exclusive');
  extendChain(root.cb.failing, 'skip', 'skipped');
  extendChain(root.failing, 'only', 'exclusive');
  extendChain(root.failing, 'skip', 'skipped');
  extendChain(root.serial, 'cb', 'callback');
  extendChain(root.serial, 'failing');
  extendChain(root.serial, 'only', 'exclusive');
  extendChain(root.serial, 'skip', 'skipped');
  extendChain(root.serial.cb, 'failing');
  extendChain(root.serial.cb, 'only', 'exclusive');
  extendChain(root.serial.cb, 'skip', 'skipped');
  extendChain(root.serial.cb.failing, 'only', 'exclusive');
  extendChain(root.serial.cb.failing, 'skip', 'skipped');
  extendChain(root.serial.failing, 'only', 'exclusive');
  extendChain(root.serial.failing, 'skip', 'skipped');

  root.after = createHookChain(startChain('test.after', fn, {...defaults, type: 'after'}), true);
  root.afterEach = createHookChain(startChain('test.afterEach', fn, {...defaults, type: 'afterEach'}), true);
  root.before = createHookChain(startChain('test.before', fn, {...defaults, type: 'before'}), false);
  root.beforeEach = createHookChain(startChain('test.beforeEach', fn, {...defaults, type: 'beforeEach'}), false);

  root.serial.after = createHookChain(startChain('test.after', fn, {...defaults, serial: true, type: 'after'}), true);
  root.serial.afterEach = createHookChain(startChain('test.afterEach', fn, {...defaults, serial: true, type: 'afterEach'}), true);
  root.serial.before = createHookChain(startChain('test.before', fn, {...defaults, serial: true, type: 'before'}), false);
  root.serial.beforeEach = createHookChain(startChain('test.beforeEach', fn, {...defaults, serial: true, type: 'beforeEach'}), false);

  // "todo" tests cannot be chained. Allow todo tests to be flagged as needing
  // to be serial.
  root.todo = startChain('test.todo', fn, {...defaults, type: 'test', todo: true});
  root.serial.todo = startChain('test.serial.todo', fn, {...defaults, serial: true, type: 'test', todo: true});

  root.meta = meta;

  return root;
}

module.exports = createChain;
|
||||
16
node_modules/ava/lib/environment-variables.js
generated
vendored
Normal file
16
node_modules/ava/lib/environment-variables.js
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
'use strict';
|
||||
// Validate the `environmentVariables` configuration: a missing/falsy config
// becomes an empty object, and any non-string value is rejected up front so
// the worker processes receive a well-formed environment.
function validateEnvironmentVariables(environmentVariables) {
	if (!environmentVariables) {
		return {};
	}

	const allStrings = Object.values(environmentVariables).every(value => typeof value === 'string');
	if (!allStrings) {
		throw new TypeError('The ’environmentVariables’ configuration must be an object containing string values.');
	}

	return environmentVariables;
}
|
||||
|
||||
module.exports = validateEnvironmentVariables;
|
||||
43
node_modules/ava/lib/extensions.js
generated
vendored
Normal file
43
node_modules/ava/lib/extensions.js
generated
vendored
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
module.exports = (configuredExtensions, providers = []) => {
|
||||
// Combine all extensions possible for testing. Remove duplicate extensions.
|
||||
const duplicates = new Set();
|
||||
const seen = new Set();
|
||||
const combine = extensions => {
|
||||
for (const ext of extensions) {
|
||||
if (seen.has(ext)) {
|
||||
duplicates.add(ext);
|
||||
} else {
|
||||
seen.add(ext);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (configuredExtensions !== undefined) {
|
||||
combine(configuredExtensions);
|
||||
}
|
||||
|
||||
for (const {main} of providers) {
|
||||
combine(main.extensions);
|
||||
}
|
||||
|
||||
if (duplicates.size > 0) {
|
||||
throw new Error(`Unexpected duplicate extensions in options: ’${[...duplicates].join('’, ’')}’.`);
|
||||
}
|
||||
|
||||
// Unless the default was used by providers, as long as the extensions aren't explicitly set, set the default.
|
||||
if (configuredExtensions === undefined) {
|
||||
if (!seen.has('cjs')) {
|
||||
seen.add('cjs');
|
||||
}
|
||||
|
||||
if (!seen.has('mjs')) {
|
||||
seen.add('mjs');
|
||||
}
|
||||
|
||||
if (!seen.has('js')) {
|
||||
seen.add('js');
|
||||
}
|
||||
}
|
||||
|
||||
return [...seen];
|
||||
};
|
||||
117
node_modules/ava/lib/fork.js
generated
vendored
Normal file
117
node_modules/ava/lib/fork.js
generated
vendored
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
'use strict';
|
||||
const childProcess = require('child_process');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const Emittery = require('emittery');
|
||||
|
||||
// Guard against a broken setup: with `npm link` plus symlink preservation,
// AVA resolves to a different on-disk path than the worker expects.
if (fs.realpathSync(__filename) !== __filename) {
	console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
}

// In case the test file imports a different AVA install,
// the presence of this variable allows it to require this one instead
const AVA_PATH = path.resolve(__dirname, '..');

const workerPath = require.resolve('./worker/subprocess');

/**
 * Fork a worker subprocess to run a single test file.
 *
 * Returns a handle with:
 * - `exit()` — force-kill the worker (marks the exit as intentional),
 * - `notifyOfPeerFailure()` — tell the worker another file failed,
 * - `onStateChange(listener)` — subscribe to worker state events,
 * - `file` — the test file path,
 * - `promise` — resolves once the worker has exited or errored.
 */
module.exports = (file, options, execArgv = process.execArgv) => {
	let finished = false;

	const emitter = new Emittery();
	// Forward events to listeners, tagging them with the test file.
	// Events after the worker finishes are dropped.
	const emitStateChange = evt => {
		if (!finished) {
			emitter.emit('stateChange', Object.assign(evt, {testFile: file}));
		}
	};

	options = {
		file,
		baseDir: process.cwd(),
		...options
	};

	const subprocess = childProcess.fork(workerPath, options.workerArgv, {
		cwd: options.projectDir,
		silent: true,
		// Worker env: force NODE_ENV=test unless overridden, then layer the
		// configured variables and the AVA_PATH escape hatch on top.
		env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
		execArgv
	});

	subprocess.stdout.on('data', chunk => {
		emitStateChange({type: 'worker-stdout', chunk});
	});

	subprocess.stderr.on('data', chunk => {
		emitStateChange({type: 'worker-stderr', chunk});
	});

	let forcedExit = false;
	// Send an IPC message, but only while the channel is usable and the run
	// hasn't been finished or force-exited.
	const send = evt => {
		if (subprocess.connected && !finished && !forcedExit) {
			subprocess.send({ava: evt}, () => {
				// Disregard errors.
			});
		}
	};

	const promise = new Promise(resolve => {
		const finish = () => {
			finished = true;
			resolve();
		};

		subprocess.on('message', message => {
			if (!message.ava) {
				return;
			}

			// Handshake: the worker asks for its options before running.
			if (message.ava.type === 'ready-for-options') {
				send({type: 'options', options});
				return;
			}

			// Liveness pings are answered; everything else is a state event.
			if (message.ava.type === 'ping') {
				send({type: 'pong'});
			} else {
				emitStateChange(message.ava);
			}
		});

		subprocess.on('error', err => {
			emitStateChange({type: 'worker-failed', err});
			finish();
		});

		subprocess.on('exit', (code, signal) => {
			// Classify the exit: intentional kill, non-zero exit code, death by
			// signal, or a clean finish.
			if (forcedExit) {
				emitStateChange({type: 'worker-finished', forcedExit});
			} else if (code > 0) {
				emitStateChange({type: 'worker-failed', nonZeroExitCode: code});
			} else if (code === null && signal) {
				emitStateChange({type: 'worker-failed', signal});
			} else {
				emitStateChange({type: 'worker-finished', forcedExit});
			}

			finish();
		});
	});

	return {
		exit() {
			forcedExit = true;
			subprocess.kill();
		},

		notifyOfPeerFailure() {
			send({type: 'peer-failed'});
		},

		onStateChange(listener) {
			return emitter.on('stateChange', listener);
		},

		file,
		promise
	};
};
|
||||
247
node_modules/ava/lib/globs.js
generated
vendored
Normal file
247
node_modules/ava/lib/globs.js
generated
vendored
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const globby = require('globby');
|
||||
const ignoreByDefault = require('ignore-by-default');
|
||||
const picomatch = require('picomatch');
|
||||
const slash = require('slash');
|
||||
const providerManager = require('./provider-manager');
|
||||
|
||||
// Directories ignored by default (plus node_modules at any depth).
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
const defaultPicomatchIgnorePatterns = [
	...defaultIgnorePatterns,
	// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
	...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`)
];

// Matcher for the default ignores, used when checking watcher-ignored files.
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);

const defaultIgnoredByWatcherPatterns = [
	'**/*.snap.md', // No need to rerun tests when the Markdown files change.
	'ava.config.js', // Config is not reloaded so avoid rerunning tests when it changes.
	'ava.config.cjs' // Config is not reloaded so avoid rerunning tests when it changes.
];

// Build a glob fragment matching any configured extension, e.g. `{js,cjs}`.
const buildExtensionPattern = extensions => extensions.length === 1 ? extensions[0] : `{${extensions.join(',')}}`;
|
||||
|
||||
// Normalize a single glob pattern: use `/` separators on every platform and
// strip a leading `./` — also inside a `!` negation — so equivalent patterns
// compare and match consistently.
function normalizePattern(pattern) {
	// Always use `/` in patterns, harmonizing matching across platforms.
	const normalized = process.platform === 'win32' ? slash(pattern) : pattern;

	if (normalized.startsWith('./')) {
		return normalized.slice(2);
	}

	return normalized.startsWith('!./') ? `!${normalized.slice(3)}` : normalized;
}
|
||||
|
||||
exports.normalizePattern = normalizePattern;
|
||||
|
||||
// Normalize every pattern in the list (see normalizePattern()).
function normalizePatterns(patterns) {
	const normalized = [];
	for (const pattern of patterns) {
		normalized.push(normalizePattern(pattern));
	}

	return normalized;
}
|
||||
|
||||
exports.normalizePatterns = normalizePatterns;
|
||||
|
||||
// Resolve the effective glob configuration: validate the configured `files`
// and `ignoredByWatcher` arrays, fall back to the default test patterns, and
// let capable providers rewrite the resulting pattern sets.
function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
	if (filePatterns !== undefined && (!Array.isArray(filePatterns) || filePatterns.length === 0)) {
		throw new Error('The ’files’ configuration must be an array containing glob patterns.');
	}

	if (ignoredByWatcherPatterns !== undefined && (!Array.isArray(ignoredByWatcherPatterns) || ignoredByWatcherPatterns.length === 0)) {
		throw new Error('The ’ignoredByWatcher’ configuration must be an array containing glob patterns.');
	}

	const extensionPattern = buildExtensionPattern(extensions);
	// The conventional locations AVA looks for tests when `files` is not set.
	const defaultTestPatterns = [
		`test.${extensionPattern}`,
		`{src,source}/test.${extensionPattern}`,
		`**/__tests__/**/*.${extensionPattern}`,
		`**/*.spec.${extensionPattern}`,
		`**/*.test.${extensionPattern}`,
		`**/test-*.${extensionPattern}`,
		`**/test/**/*.${extensionPattern}`,
		`**/tests/**/*.${extensionPattern}`,
		'!**/__tests__/**/__{helper,fixture}?(s)__/**/*',
		'!**/test?(s)/**/{helper,fixture}?(s)/**/*'
	];

	if (filePatterns) {
		filePatterns = normalizePatterns(filePatterns);

		if (filePatterns.every(pattern => pattern.startsWith('!'))) {
			// Use defaults if patterns only contains exclusions.
			filePatterns = [...defaultTestPatterns, ...filePatterns];
		}
	} else {
		filePatterns = defaultTestPatterns;
	}

	if (ignoredByWatcherPatterns) {
		ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)];
	} else {
		ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns];
	}

	// Providers speaking a recent enough protocol may rewrite both sets
	// (e.g. to map TypeScript sources to build output).
	for (const {level, main} of providers) {
		if (level >= providerManager.levels.pathRewrites) {
			({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
		}
	}

	return {extensions, filePatterns, ignoredByWatcherPatterns};
}
|
||||
|
||||
exports.normalizeGlobs = normalizeGlobs;
|
||||
|
||||
// True when `file`'s extension (without the leading dot) appears in `extensions`.
const hasExtension = (extensions, file) => {
	const extension = path.extname(file).slice(1);
	return extensions.includes(extension);
};
|
||||
|
||||
exports.hasExtension = hasExtension;
|
||||
|
||||
// Run globby over `patterns` relative to `cwd`, with AVA's fixed matching
// semantics, and return absolute paths.
const globFiles = async (cwd, patterns) => {
	const files = await globby(patterns, {
		// Globs should work relative to the cwd value only (this should be the
		// project directory that AVA is run in).
		absolute: false,
		braceExpansion: true,
		caseSensitiveMatch: false,
		cwd,
		dot: false,
		expandDirectories: false,
		extglob: true,
		followSymbolicLinks: true,
		gitignore: false,
		globstar: true,
		ignore: defaultIgnorePatterns,
		baseNameMatch: false,
		onlyFiles: true,
		stats: false,
		unique: true
	});

	// Return absolute file paths. This has the side-effect of normalizing paths
	// on Windows.
	return files.map(file => path.join(cwd, file));
};
|
||||
|
||||
// Glob for files under `cwd`, keeping only those with a recognized extension.
async function findFiles({cwd, extensions, filePatterns}) {
	const files = await globFiles(cwd, filePatterns);
	return files.filter(file => hasExtension(extensions, file));
}
|
||||
|
||||
exports.findFiles = findFiles;
|
||||
|
||||
// Find test files: like findFiles(), but exclude underscore-prefixed
// basenames, which are treated as helpers rather than tests.
async function findTests({cwd, extensions, filePatterns}) {
	const files = await findFiles({cwd, extensions, filePatterns});
	return files.filter(file => !path.basename(file).startsWith('_'));
}
|
||||
|
||||
exports.findTests = findTests;
|
||||
|
||||
// Build the ignore list handed to chokidar: the default ignores (expanded to
// also cover directory contents) plus the configured watcher ignores, with
// negated patterns dropped (chokidar has no concept of negation).
function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
	const expandedDefaults = defaultIgnorePatterns.map(pattern => `${pattern}/**/*`);
	const positiveIgnores = ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!'));
	return [...expandedDefaults, ...positiveIgnores];
}
|
||||
|
||||
exports.getChokidarIgnorePatterns = getChokidarIgnorePatterns;
|
||||
|
||||
// Cache compiled matchers per pattern-array *identity* (WeakMap keyed on the
// input array), since the same arrays are matched against many files.
const matchingCache = new WeakMap();
// Compile `input` into two picomatch matchers: `match` honors negated
// patterns and the default ignores, `matchNoIgnore` applies neither.
const processMatchingPatterns = input => {
	let result = matchingCache.get(input);
	if (!result) {
		const ignore = [...defaultPicomatchIgnorePatterns];
		// Split negated patterns out of the list and fold them into `ignore`.
		const patterns = input.filter(pattern => {
			if (pattern.startsWith('!')) {
				// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
				ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
				return false;
			}

			return true;
		});

		result = {
			match: picomatch(patterns, {ignore}),
			matchNoIgnore: picomatch(patterns)
		};
		matchingCache.set(input, result);
	}

	return result;
};
|
||||
|
||||
// Does `file` match any of `patterns`, honoring negations and default ignores?
function matches(file, patterns) {
	return processMatchingPatterns(patterns).match(file);
}
|
||||
|
||||
exports.matches = matches;
|
||||
|
||||
// Does `file` match the given ignore patterns, or the built-in default
// ignores? Negations inside `patterns` are not applied here.
const matchesIgnorePatterns = (file, patterns) => {
	if (processMatchingPatterns(patterns).matchNoIgnore(file)) {
		return true;
	}

	return defaultMatchNoIgnore(file);
};
|
||||
|
||||
// Convert an absolute file path into a `/`-separated path relative to `cwd`
// so it can be matched against glob patterns. Paths outside `cwd` — or calls
// with an empty `cwd` — are returned untouched.
function normalizeFileForMatching(cwd, file) {
	let directory = cwd;
	let filePath = file;
	if (process.platform === 'win32') {
		directory = slash(directory);
		filePath = slash(filePath);
	}

	// TODO: Ensure tests provide an actual value.
	if (!directory) {
		return filePath;
	}

	// TODO: If `file` is outside `cwd` we can't normalize it. Need to figure
	// out if that's a real-world scenario, but we may have to ensure the file
	// isn't even selected.
	if (!filePath.startsWith(directory)) {
		return filePath;
	}

	// Assume `cwd` does *not* end in a slash.
	return filePath.slice(directory.length + 1);
}
|
||||
|
||||
exports.normalizeFileForMatching = normalizeFileForMatching;
|
||||
|
||||
// Decide whether an already-normalized (cwd-relative) file is a "helper":
// either its basename starts with an underscore, or some parent directory
// starts with exactly one underscore. Absolute paths could not be normalized,
// so only their basename is considered.
function isHelperish(file) {
	if (path.basename(file).startsWith('_')) {
		return true;
	}

	// Couldn't be normalized — skip the parent-directory check entirely.
	if (path.isAbsolute(file)) {
		return false;
	}

	// A single leading underscore marks a helper directory; a double
	// underscore (e.g. `__tests__`) does not.
	const directories = path.dirname(file).split('/');
	return directories.some(dir => /^_(?:$|[^_])/.test(dir));
}
|
||||
|
||||
exports.isHelperish = isHelperish;
|
||||
|
||||
// Classify a file for the watcher: is it ignored, and is it a test file?
// A test file must have a recognized extension, not be a helper, and match
// at least one of the (non-empty) file patterns.
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
	const normalized = normalizeFileForMatching(cwd, file);
	const isTest = hasExtension(extensions, normalized) &&
		!isHelperish(normalized) &&
		filePatterns.length > 0 &&
		matches(normalized, filePatterns);

	return {
		isIgnoredByWatcher: matchesIgnorePatterns(normalized, ignoredByWatcherPatterns),
		isTest
	};
}
|
||||
|
||||
exports.classify = classify;
|
||||
|
||||
// Restrict `testFiles` to those matching the CLI-provided filter patterns.
function applyTestFileFilter({cwd, filter, testFiles}) {
	return testFiles.filter(file => {
		const normalized = normalizeFileForMatching(cwd, file);
		return matches(normalized, filter);
	});
}
|
||||
|
||||
exports.applyTestFileFilter = applyTestFileFilter;
|
||||
5
node_modules/ava/lib/is-ci.js
generated
vendored
Normal file
5
node_modules/ava/lib/is-ci.js
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
const info = require('ci-info');

// AVA_FORCE_CI overrides detection: 'not-ci' forces false, 'ci' forces true.
const {AVA_FORCE_CI} = process.env;

// Exported value: are we running on a CI service?
module.exports = AVA_FORCE_CI === 'not-ci' ? false : AVA_FORCE_CI === 'ci' || info.isCI;
|
||||
64
node_modules/ava/lib/line-numbers.js
generated
vendored
Normal file
64
node_modules/ava/lib/line-numbers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
'use strict';
|
||||
|
||||
const micromatch = require('micromatch');
|
||||
const flatten = require('lodash/flatten');
|
||||
|
||||
// Patterns recognizing `file:line` suffixes: single numbers, `start-end`
// ranges, and comma-separated combinations of both.
const NUMBER_REGEX = /^\d+$/;
const RANGE_REGEX = /^(?<startGroup>\d+)-(?<endGroup>\d+)$/;
const LINE_NUMBERS_REGEX = /^(?:\d+(?:-\d+)?,?)+$/;
const DELIMITER = ':';

// Remove duplicates, preserving first-seen order.
const distinctArray = array => [...new Set(array)];

// Numerically sorted copy; the input array is left untouched.
const sortNumbersAscending = array => [...array].sort((a, b) => a - b);

const parseNumber = string => Number.parseInt(string, 10);
const removeAllWhitespace = string => string.replace(/\s/g, '');

// Inclusive integer range, e.g. range(2, 4) === [2, 3, 4].
const range = (start, end) => Array.from({length: end - start + 1}, (_, index) => start + index);
|
||||
|
||||
// Parse a validated suffix like `1,3-5` into a sorted, de-duplicated list of
// line numbers. Reversed ranges (`5-3`) are accepted and normalized.
const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(flatten(
	suffix.split(',').map(part => {
		if (NUMBER_REGEX.test(part)) {
			return parseNumber(part);
		}

		// Not a plain number, so it must be a range — LINE_NUMBERS_REGEX has
		// already guaranteed RANGE_REGEX matches here.
		const {groups: {startGroup, endGroup}} = RANGE_REGEX.exec(part);
		const start = parseNumber(startGroup);
		const end = parseNumber(endGroup);

		if (start > end) {
			return range(end, start);
		}

		return range(start, end);
	})
)));
|
||||
|
||||
// Split a CLI pattern like `test.js:1,3-5` into the glob part and the parsed
// line numbers. Returns `lineNumbers: null` when there is no valid suffix.
function splitPatternAndLineNumbers(pattern) {
	const segments = pattern.split(DELIMITER);
	const noLineNumbers = {pattern, lineNumbers: null};
	if (segments.length === 1) {
		return noLineNumbers;
	}

	const suffix = removeAllWhitespace(segments.pop());
	if (!LINE_NUMBERS_REGEX.test(suffix)) {
		return noLineNumbers;
	}

	return {pattern: segments.join(DELIMITER), lineNumbers: parseLineNumbers(suffix)};
}
|
||||
|
||||
exports.splitPatternAndLineNumbers = splitPatternAndLineNumbers;
|
||||
|
||||
// Collect the line numbers from all filter entries whose pattern matches the
// given (normalized) file path, merged, de-duplicated and sorted ascending.
function getApplicableLineNumbers(normalizedFilePath, filter) {
	return sortNumbersAscending(distinctArray(flatten(
		filter
			.filter(({pattern, lineNumbers}) => lineNumbers && micromatch.isMatch(normalizedFilePath, pattern))
			.map(({lineNumbers}) => lineNumbers)
	)));
}
|
||||
|
||||
exports.getApplicableLineNumbers = getApplicableLineNumbers;
|
||||
160
node_modules/ava/lib/load-config.js
generated
vendored
Normal file
160
node_modules/ava/lib/load-config.js
generated
vendored
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const vm = require('vm');
|
||||
const isPlainObject = require('is-plain-object');
|
||||
const pkgConf = require('pkg-conf');
|
||||
|
||||
// Sentinels distinguishing "no config file found" and "file without a default
// export" from legitimate configuration values.
const NO_SUCH_FILE = Symbol('no ava.config.js file');
const MISSING_DEFAULT_EXPORT = Symbol('missing default export');
// Names of currently supported non-semver experiments (none at the moment).
const EXPERIMENTS = new Set();

// *Very* rudimentary support for loading ava.config.js files containing an `export default` statement.
const evaluateJsConfig = configFile => {
	const contents = fs.readFileSync(configFile, 'utf8');
	// Rewrite `export default` into an assignment inside an IIFE and evaluate
	// the script in this context; lineOffset keeps stack traces aligned with
	// the original file despite the injected first line.
	const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.replace(/export default/g, '__export__ =')};return __export__;})()`, {
		filename: configFile,
		lineOffset: -1
	});
	return {
		default: script.runInThisContext()
	};
};
|
||||
|
||||
// Load `ava.config.js`. Returns null when a different extension was requested
// or the file does not exist; throws when it cannot be evaluated or lacks a
// default export.
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}) => {
	if (!configFile.endsWith('.js')) {
		return null;
	}

	const fileForErrorMessage = path.relative(projectDir, configFile);

	let config;
	try {
		// Fall back to the sentinel when the file has no default export.
		({default: config = MISSING_DEFAULT_EXPORT} = evaluateJsConfig(configFile));
	} catch (error) {
		if (error.code === 'ENOENT') {
			return null;
		}

		throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
	}

	if (config === MISSING_DEFAULT_EXPORT) {
		throw new Error(`${fileForErrorMessage} must have a default export, using ES module syntax`);
	}

	return {config, fileForErrorMessage};
};
|
||||
|
||||
// Load `ava.config.cjs` via require(). Returns null when a different
// extension was requested or the module cannot be found.
const loadCjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.cjs')}) => {
	if (!configFile.endsWith('.cjs')) {
		return null;
	}

	const fileForErrorMessage = path.relative(projectDir, configFile);
	try {
		return {config: require(configFile), fileForErrorMessage};
	} catch (error) {
		if (error.code === 'MODULE_NOT_FOUND') {
			return null;
		}

		throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
	}
};
|
||||
|
||||
// Probe for `ava.config.mjs`. Returns null when a different extension was
// requested or the file does not exist — but always throws when the file IS
// present, since this AVA version cannot load ES module configs yet.
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}) => {
	if (!configFile.endsWith('.mjs')) {
		return null;
	}

	const fileForErrorMessage = path.relative(projectDir, configFile);
	try {
		// Existence check only; the contents are not used.
		fs.readFileSync(configFile);
	} catch (error) {
		if (error.code === 'ENOENT') {
			return null;
		}

		throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
	}

	throw new Error(`AVA cannot yet load ${fileForErrorMessage} files`);
};
|
||||
|
||||
// Resolve AVA's configuration: merge `defaults`, at most one config file
// (js/cjs/mjs) and the package.json `ava` field, validating shape, conflicts
// and experiment flags along the way. Returns the merged config, including
// the resolved `projectDir`.
function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) { // eslint-disable-line complexity
	let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
	const filepath = pkgConf.filepath(packageConf);
	// No package.json found: treat `resolveFrom` as the project directory.
	const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);

	if (configFile) {
		configFile = path.resolve(configFile); // Relative to CWD
		if (path.basename(configFile) !== path.relative(projectDir, configFile)) {
			throw new Error('Config files must be located next to the package.json file');
		}

		if (!configFile.endsWith('.js') && !configFile.endsWith('.cjs') && !configFile.endsWith('.mjs')) {
			throw new Error('Config files must have .js, .cjs or .mjs extensions');
		}
	}

	// An explicitly chosen config file wins over package.json configuration.
	const allowConflictWithPackageJson = Boolean(configFile);

	// Probe all three loaders; more than one hit is a conflict.
	let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
		loadJsConfig({projectDir, configFile}),
		loadCjsConfig({projectDir, configFile}),
		loadMjsConfig({projectDir, configFile})
	].filter(result => result !== null);

	if (conflicting.length > 0) {
		throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
	}

	if (fileConf !== NO_SUCH_FILE) {
		if (allowConflictWithPackageJson) {
			packageConf = {};
		} else if (Object.keys(packageConf).length > 0) {
			throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
		}

		if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
			throw new TypeError(`${fileForErrorMessage} must not export a promise`);
		}

		if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
			throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
		}

		// Factory exports are invoked with the project directory and must
		// synchronously return a plain object.
		if (typeof fileConf === 'function') {
			fileConf = fileConf({projectDir});
			if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
				throw new TypeError(`Factory method exported by ${fileForErrorMessage} must not return a promise`);
			}

			if (!isPlainObject(fileConf)) {
				throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
			}
		}

		if ('ava' in fileConf) {
			throw new Error(`Encountered ’ava’ property in ${fileForErrorMessage}; avoid wrapping the configuration`);
		}
	}

	// Note: spreading the NO_SUCH_FILE symbol contributes no properties.
	const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};

	const {nonSemVerExperiments: experiments} = config;
	if (!isPlainObject(experiments)) {
		throw new Error(`nonSemVerExperiments from ${fileForErrorMessage} must be an object`);
	}

	for (const key of Object.keys(experiments)) {
		if (!EXPERIMENTS.has(key)) {
			throw new Error(`nonSemVerExperiments.${key} from ${fileForErrorMessage} is not a supported experiment`);
		}
	}

	return config;
}
|
||||
|
||||
module.exports = loadConfig;
|
||||
17
node_modules/ava/lib/node-arguments.js
generated
vendored
Normal file
17
node_modules/ava/lib/node-arguments.js
generated
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
'use strict';
|
||||
const arrgv = require('arrgv');
|
||||
|
||||
// Combine Node.js CLI arguments for worker processes: the current process's
// execArgv, the configured `nodeArguments` (`fromConf`) and the
// `--node-arguments` CLI string (`fromArgv`), parsed shell-style via arrgv.
// Throws a friendly error when the CLI string cannot be parsed.
function normalizeNodeArguments(fromConf = [], fromArgv = '') {
	const parsedArgv = [];
	if (fromArgv !== '') {
		try {
			parsedArgv.push(...arrgv(fromArgv));
		} catch {
			throw new Error('Could not parse `--node-arguments` value. Make sure all strings are closed and backslashes are used correctly.');
		}
	}

	return [...process.execArgv, ...fromConf, ...parsedArgv];
}
|
||||
|
||||
module.exports = normalizeNodeArguments;
|
||||
5
node_modules/ava/lib/now-and-timers.js
generated
vendored
Normal file
5
node_modules/ava/lib/now-and-timers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
'use strict';
const timers = require('timers');

// Re-export the `timers` built-ins alongside a `now()` clock on one module,
// giving tests a single seam for stubbing time-related behavior.
Object.assign(exports, timers);
exports.now = Date.now;
|
||||
15
node_modules/ava/lib/parse-test-args.js
generated
vendored
Normal file
15
node_modules/ava/lib/parse-test-args.js
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
'use strict';
|
||||
// Pick apart the arguments of a test declaration: an optional string title,
// then either an array of implementations or a single implementation, with
// any remaining arguments passed through to title builders/implementations.
// Note: `args` is consumed (mutated) in place; the leftover array is returned.
function parseTestArgs(args) {
	let rawTitle;
	if (typeof args[0] === 'string') {
		rawTitle = args.shift();
	}

	const receivedImplementationArray = Array.isArray(args[0]);
	const implementations = receivedImplementationArray ? args.shift() : args.splice(0, 1);

	// Derive the final title for one implementation, preferring its `title`
	// factory (which receives the raw title plus the remaining arguments).
	const buildTitle = implementation => {
		const title = implementation.title ? implementation.title(rawTitle, ...args) : rawTitle;
		const isSet = typeof title !== 'undefined';
		const isValid = typeof title === 'string';
		return {title, isSet, isValid, isEmpty: !title};
	};

	return {args, buildTitle, implementations, rawTitle, receivedImplementationArray};
}
|
||||
|
||||
module.exports = parseTestArgs;
|
||||
53
node_modules/ava/lib/provider-manager.js
generated
vendored
Normal file
53
node_modules/ava/lib/provider-manager.js
generated
vendored
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
const pkg = require('../package.json');
const globs = require('./globs');

// Capability levels a provider can negotiate; higher levels unlock more
// integration (pathRewrites allows providers to rewrite glob patterns).
const levels = {
	ava3: 1,
	pathRewrites: 2
};

exports.levels = levels;

// Protocol identifiers a provider may offer, mapped to the level they grant.
const levelsByProtocol = {
	'ava-3': levels.ava3,
	'ava-3.2': levels.pathRewrites
};

// Load a provider module (e.g. @ava/babel) and negotiate a protocol with it.
// Throws when no mutually supported protocol exists.
function load(providerModule, projectDir) {
	const ava = {version: pkg.version};
	const makeProvider = require(providerModule);

	// Captured by the negotiation callback below, which runs synchronously
	// inside makeProvider().
	let fatal;
	let level;
	const provider = makeProvider({
		negotiateProtocol(identifiers, {version}) {
			// Pick the first identifier this AVA version understands.
			const [identifier] = identifiers.filter(identifier => Reflect.has(levelsByProtocol, identifier));

			if (identifier === undefined) {
				fatal = new Error(`This version of AVA (${ava.version}) is not compatible with ${providerModule}@${version}`);
				return null;
			}

			level = levelsByProtocol[identifier];

			// The capabilities AVA exposes to the provider.
			return {
				ava,
				async findFiles({extensions, patterns}) {
					return globs.findFiles({cwd: projectDir, extensions, filePatterns: patterns});
				},
				identifier,
				normalizeGlobPatterns: globs.normalizePatterns,
				projectDir
			};
		}
	});

	if (fatal) {
		throw fatal;
	}

	return {...provider, level};
}

exports.babel = projectDir => load('@ava/babel', projectDir);
exports.typescript = projectDir => load('@ava/typescript', projectDir);
|
||||
73
node_modules/ava/lib/reporters/beautify-stack.js
generated
vendored
Normal file
73
node_modules/ava/lib/reporters/beautify-stack.js
generated
vendored
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
'use strict';
const StackUtils = require('stack-utils');

// Stack cleaner configured to drop frames from AVA's own machinery, its
// tooling dependencies, and uninformative Node.js internals.
const stackUtils = new StackUtils({
	ignoredPackages: [
		'@ava/babel',
		'@ava/require-precompiled',
		'@ava/typescript',
		'append-transform',
		'ava',
		'empower-core',
		'esm',
		'nyc'
	],
	internals: [
		// AVA internals, which ignoredPackages don't ignore when we run our own unit tests.
		/\/ava\/(?:lib\/|lib\/worker\/)?[\w-]+\.js:\d+:\d+\)?$/,
		// Only ignore Node.js internals that really are not useful for debugging.
		...StackUtils.nodeInternals().filter(regexp => !/\(internal/.test(regexp.source)),
		/\(internal\/process\/task_queues\.js:\d+:\d+\)$/,
		/\(internal\/modules\/cjs\/.+?\.js:\d+:\d+\)$/,
		/async Promise\.all \(index/,
		/new Promise \(<anonymous>\)/
	]
});

/*
 * Given a string value of the format generated for the `stack` property of a
 * V8 error object, return a string that contains only stack frame information
 * for frames that have relevance to the consumer.
 *
 * For example, given the following string value:
 *
 * ```
 * Error
 *     at inner (/home/ava/ex.js:7:12)
 *     at /home/ava/ex.js:12:5
 *     at outer (/home/ava/ex.js:13:4)
 *     at Object.<anonymous> (/home/ava/ex.js:14:3)
 *     at Module._compile (module.js:570:32)
 *     at Object.Module._extensions..js (module.js:579:10)
 *     at Module.load (module.js:487:32)
 *     at tryModuleLoad (module.js:446:12)
 *     at Function.Module._load (module.js:438:3)
 *     at Module.runMain (module.js:604:10)
 * ```
 *
 * ...this function returns the following string value:
 *
 * ```
 * inner (/home/ava/ex.js:7:12)
 * /home/ava/ex.js:12:5
 * outer (/home/ava/ex.js:13:4)
 * Object.<anonymous> (/home/ava/ex.js:14:3)
 * Module._compile (module.js:570:32)
 * Object.Module._extensions..js (module.js:579:10)
 * Module.load (module.js:487:32)
 * tryModuleLoad (module.js:446:12)
 * Function.Module._load (module.js:438:3)
 * Module.runMain (module.js:604:10)
 * ```
 */
module.exports = stack => {
	// Missing or empty stacks yield an empty frame list.
	if (!stack) {
		return [];
	}

	// Clean, then split into trimmed, non-empty per-frame lines.
	return stackUtils.clean(stack)
		.trim()
		.split('\n')
		.map(line => line.trim())
		.filter(line => line !== '');
};
|
||||
17
node_modules/ava/lib/reporters/colors.js
generated
vendored
Normal file
17
node_modules/ava/lib/reporters/colors.js
generated
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
'use strict';
const chalk = require('../chalk').get();

// Shared color palette for reporters, keyed by the kind of output being
// rendered. Each value is a chalk style function.
module.exports = {
	log: chalk.gray,
	title: chalk.bold,
	error: chalk.red,
	skip: chalk.yellow,
	todo: chalk.blue,
	pass: chalk.green,
	duration: chalk.gray.dim,
	errorSource: chalk.gray,
	errorStack: chalk.gray,
	errorStackInternal: chalk.gray.dim,
	stack: chalk.red,
	information: chalk.magenta
};
|
||||
27
node_modules/ava/lib/reporters/format-serialized-error.js
generated
vendored
Normal file
27
node_modules/ava/lib/reporters/format-serialized-error.js
generated
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
'use strict';
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const chalk = require('../chalk').get();
|
||||
|
||||
function formatSerializedError(error) {
|
||||
const printMessage = error.values.length === 0 ?
|
||||
Boolean(error.message) :
|
||||
!error.values[0].label.startsWith(error.message);
|
||||
|
||||
if (error.statements.length === 0 && error.values.length === 0) {
|
||||
return {formatted: null, printMessage};
|
||||
}
|
||||
|
||||
let formatted = '';
|
||||
for (const value of error.values) {
|
||||
formatted += `${value.label}\n\n${trimOffNewlines(value.formatted)}\n\n`;
|
||||
}
|
||||
|
||||
for (const statement of error.statements) {
|
||||
formatted += `${statement[0]}\n${chalk.grey('=>')} ${trimOffNewlines(statement[1])}\n\n`;
|
||||
}
|
||||
|
||||
formatted = trimOffNewlines(formatted);
|
||||
return {formatted, printMessage};
|
||||
}
|
||||
|
||||
module.exports = formatSerializedError;
|
||||
55
node_modules/ava/lib/reporters/improper-usage-messages.js
generated
vendored
Normal file
55
node_modules/ava/lib/reporters/improper-usage-messages.js
generated
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
'use strict';
|
||||
const chalk = require('../chalk').get();
|
||||
const pkg = require('../../package.json');
|
||||
|
||||
exports.forError = error => {
|
||||
if (!error.improperUsage) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const {assertion} = error;
|
||||
if (assertion === 'throws' || assertion === 'notThrows') {
|
||||
return `Try wrapping the first argument to \`t.${assertion}()\` in a function:
|
||||
|
||||
${chalk.cyan(`t.${assertion}(() => { `)}${chalk.grey('/* your code here */')}${chalk.cyan(' })')}
|
||||
|
||||
Visit the following URL for more details:
|
||||
|
||||
${chalk.blue.underline(`https://github.com/avajs/ava/blob/v${pkg.version}/docs/03-assertions.md#throwsfn-expected-message`)}`;
|
||||
}
|
||||
|
||||
if (assertion === 'snapshot') {
|
||||
const {name, snapPath} = error.improperUsage;
|
||||
|
||||
if (name === 'ChecksumError') {
|
||||
return `The snapshot file is corrupted.
|
||||
|
||||
File path: ${chalk.yellow(snapPath)}
|
||||
|
||||
Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to recreate it.`;
|
||||
}
|
||||
|
||||
if (name === 'LegacyError') {
|
||||
return `The snapshot file was created with AVA 0.19. It’s not supported by this AVA version.
|
||||
|
||||
File path: ${chalk.yellow(snapPath)}
|
||||
|
||||
Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.`;
|
||||
}
|
||||
|
||||
if (name === 'VersionMismatchError') {
|
||||
const {snapVersion, expectedVersion} = error.improperUsage;
|
||||
const upgradeMessage = snapVersion < expectedVersion ?
|
||||
`Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.` :
|
||||
'You should upgrade AVA.';
|
||||
|
||||
return `The snapshot file is v${snapVersion}, but only v${expectedVersion} is supported.
|
||||
|
||||
File path: ${chalk.yellow(snapPath)}
|
||||
|
||||
${upgradeMessage}`;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
Normal file
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
Normal file
|
|
@ -0,0 +1,619 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const cliCursor = require('cli-cursor');
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const ora = require('ora');
|
||||
const plur = require('plur');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
class LineWriter extends stream.Writable {
|
||||
constructor(dest, spinner) {
|
||||
super();
|
||||
|
||||
this.dest = dest;
|
||||
this.columns = dest.columns || 80;
|
||||
this.spinner = spinner;
|
||||
this.lastSpinnerText = '';
|
||||
}
|
||||
|
||||
_write(chunk, encoding, callback) {
|
||||
// Discard the current spinner output. Any lines that were meant to be
|
||||
// preserved should be rewritten.
|
||||
this.spinner.clear();
|
||||
|
||||
this._writeWithSpinner(chunk.toString('utf8'));
|
||||
callback();
|
||||
}
|
||||
|
||||
_writev(pieces, callback) {
|
||||
// Discard the current spinner output. Any lines that were meant to be
|
||||
// preserved should be rewritten.
|
||||
this.spinner.clear();
|
||||
|
||||
const last = pieces.pop();
|
||||
for (const piece of pieces) {
|
||||
this.dest.write(piece.chunk);
|
||||
}
|
||||
|
||||
this._writeWithSpinner(last.chunk.toString('utf8'));
|
||||
callback();
|
||||
}
|
||||
|
||||
_writeWithSpinner(string) {
|
||||
if (!this.spinner.id) {
|
||||
this.dest.write(string);
|
||||
return;
|
||||
}
|
||||
|
||||
this.lastSpinnerText = string;
|
||||
// Ignore whitespace at the end of the chunk. We're continiously rewriting
|
||||
// the last line through the spinner. Also be careful to remove the indent
|
||||
// as the spinner adds its own.
|
||||
this.spinner.text = string.trimEnd().slice(2);
|
||||
this.spinner.render();
|
||||
}
|
||||
|
||||
writeLine(string) {
|
||||
if (string) {
|
||||
this.write(indentString(string, 2) + os.EOL);
|
||||
} else {
|
||||
this.write(os.EOL);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class MiniReporter {
|
||||
constructor(options) {
|
||||
this.reportStream = options.reportStream;
|
||||
this.stdStream = options.stdStream;
|
||||
this.watching = options.watching;
|
||||
|
||||
this.spinner = ora({
|
||||
isEnabled: true,
|
||||
color: options.spinner ? options.spinner.color : 'gray',
|
||||
discardStdin: !options.watching,
|
||||
hideCursor: false,
|
||||
spinner: options.spinner || (process.platform === 'win32' ? 'line' : 'dots'),
|
||||
stream: options.reportStream
|
||||
});
|
||||
this.lineWriter = new LineWriter(this.reportStream, this.spinner);
|
||||
|
||||
this.consumeStateChange = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.consumeStateChange));
|
||||
this.endRun = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.endRun));
|
||||
this.relativeFile = file => path.relative(options.projectDir, file);
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset() {
|
||||
if (this.removePreviousListener) {
|
||||
this.removePreviousListener();
|
||||
}
|
||||
|
||||
this.failFastEnabled = false;
|
||||
this.failures = [];
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.filesWithoutDeclaredTests = new Set();
|
||||
this.filesWithoutMatchedLineNumbers = new Set();
|
||||
this.internalErrors = [];
|
||||
this.knownFailures = [];
|
||||
this.lineNumberErrors = [];
|
||||
this.matching = false;
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
this.previousFailures = 0;
|
||||
this.removePreviousListener = null;
|
||||
this.stats = null;
|
||||
this.uncaughtExceptions = [];
|
||||
this.unhandledRejections = [];
|
||||
}
|
||||
|
||||
startRun(plan) {
|
||||
if (plan.bailWithoutReporting) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reset();
|
||||
|
||||
this.failFastEnabled = plan.failFastEnabled;
|
||||
this.matching = plan.matching;
|
||||
this.previousFailures = plan.previousFailures;
|
||||
|
||||
if (this.watching || plan.files.length > 1) {
|
||||
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
|
||||
}
|
||||
|
||||
this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));
|
||||
|
||||
if (this.watching && plan.runVector > 1) {
|
||||
this.reportStream.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
|
||||
}
|
||||
|
||||
cliCursor.hide(this.reportStream);
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
this.spinner.start();
|
||||
}
|
||||
|
||||
consumeStateChange(evt) { // eslint-disable-line complexity
|
||||
const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;
|
||||
|
||||
switch (evt.type) {
|
||||
case 'declared-test':
|
||||
// Ignore
|
||||
break;
|
||||
case 'hook-failed':
|
||||
this.failures.push(evt);
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'internal-error':
|
||||
this.internalErrors.push(evt);
|
||||
if (evt.testFile) {
|
||||
this.writeWithCounts(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
|
||||
} else {
|
||||
this.writeWithCounts(colors.error(`${figures.cross} Internal error`));
|
||||
}
|
||||
|
||||
break;
|
||||
case 'line-number-selection-error':
|
||||
this.lineNumberErrors.push(evt);
|
||||
this.writeWithCounts(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
|
||||
break;
|
||||
case 'missing-ava-import':
|
||||
this.filesWithMissingAvaImports.add(evt.testFile);
|
||||
this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
|
||||
break;
|
||||
case 'selected-test':
|
||||
// Ignore
|
||||
break;
|
||||
case 'stats':
|
||||
this.stats = evt.stats;
|
||||
break;
|
||||
case 'test-failed':
|
||||
this.failures.push(evt);
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'test-passed':
|
||||
if (evt.knownFailing) {
|
||||
this.knownFailures.push(evt);
|
||||
}
|
||||
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'timeout':
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(evt);
|
||||
break;
|
||||
case 'interrupt':
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(evt);
|
||||
break;
|
||||
case 'uncaught-exception':
|
||||
this.uncaughtExceptions.push(evt);
|
||||
break;
|
||||
case 'unhandled-rejection':
|
||||
this.unhandledRejections.push(evt);
|
||||
break;
|
||||
case 'worker-failed':
|
||||
if (fileStats.declaredTests === 0) {
|
||||
this.filesWithoutDeclaredTests.add(evt.testFile);
|
||||
}
|
||||
|
||||
break;
|
||||
case 'worker-finished':
|
||||
if (fileStats.declaredTests === 0) {
|
||||
this.filesWithoutDeclaredTests.add(evt.testFile);
|
||||
this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
|
||||
} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
|
||||
this.filesWithoutMatchedLineNumbers.add(evt.testFile);
|
||||
this.writeWithCounts(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
|
||||
}
|
||||
|
||||
break;
|
||||
case 'worker-stderr':
|
||||
case 'worker-stdout':
|
||||
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
|
||||
this.spinner.clear();
|
||||
|
||||
this.stdStream.write(evt.chunk);
|
||||
// If the chunk does not end with a linebreak, *forcibly* write one to
|
||||
// ensure it remains visible in the TTY.
|
||||
// Tests cannot assume their standard output is not interrupted. Indeed
|
||||
// we multiplex stdout and stderr into a single stream. However as
|
||||
// long as stdStream is different from reportStream users can read
|
||||
// their original output by redirecting the streams.
|
||||
if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
|
||||
// Use write() rather than writeLine() so the (presumably corked)
|
||||
// line writer will actually write the empty line before re-rendering
|
||||
// the last spinner text below.
|
||||
this.lineWriter.write(os.EOL);
|
||||
}
|
||||
|
||||
this.lineWriter.write(this.lineWriter.lastSpinnerText);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
writeWithCounts(string) {
|
||||
if (!this.stats) {
|
||||
return this.lineWriter.writeLine(string);
|
||||
}
|
||||
|
||||
string = string || '';
|
||||
if (string !== '') {
|
||||
string += os.EOL;
|
||||
}
|
||||
|
||||
let firstLinePostfix = this.watching ?
|
||||
' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
|
||||
'';
|
||||
|
||||
if (this.stats.passedTests > 0) {
|
||||
string += os.EOL + colors.pass(`${this.stats.passedTests} passed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`);
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
string += os.EOL + colors.skip(`${this.stats.skippedTests} skipped`);
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
string += os.EOL + colors.todo(`${this.stats.todoTests} todo`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(string);
|
||||
}
|
||||
|
||||
writeErr(evt) {
|
||||
if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
|
||||
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (evt.err.source) {
|
||||
this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
|
||||
const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.lineWriter.columns - 2});
|
||||
if (excerpt) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(excerpt);
|
||||
}
|
||||
}
|
||||
|
||||
if (evt.err.avaAssertionError) {
|
||||
const result = formatSerializedError(evt.err);
|
||||
if (result.printMessage) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.message);
|
||||
}
|
||||
|
||||
if (result.formatted) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(result.formatted);
|
||||
}
|
||||
|
||||
const message = improperUsageMessages.forError(evt.err);
|
||||
if (message) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(message);
|
||||
}
|
||||
} else if (evt.err.nonErrorObject) {
|
||||
this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
|
||||
} else {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.summary);
|
||||
}
|
||||
|
||||
const formatted = this.formatErrorStack(evt.err);
|
||||
if (formatted.length > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(formatted.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
formatErrorStack(error) {
|
||||
if (!error.stack) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (error.shouldBeautifyStack) {
|
||||
return beautifyStack(error.stack).map(line => {
|
||||
if (nodeInternals.some(internal => internal.test(line))) {
|
||||
return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
|
||||
}
|
||||
|
||||
return colors.errorStack(`${figures.pointerSmall} ${line}`);
|
||||
});
|
||||
}
|
||||
|
||||
return [error.stack];
|
||||
}
|
||||
|
||||
writeLogs(evt) {
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(colors.log(log), 4);
|
||||
const logLinesWithLeadingFigure = logLines.replace(
|
||||
/^ {4}/,
|
||||
` ${colors.information(figures.info)} `
|
||||
);
|
||||
this.lineWriter.writeLine(logLinesWithLeadingFigure);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTestSummary(evt) {
|
||||
if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
|
||||
this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
|
||||
} else if (evt.knownFailing) {
|
||||
this.writeWithCounts(`${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
} else {
|
||||
this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
|
||||
}
|
||||
}
|
||||
|
||||
writeFailure(evt) {
|
||||
this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
this.writeLogs(evt);
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
}
|
||||
|
||||
writePendingTests(evt) {
|
||||
for (const [file, testsInFile] of evt.pendingTests) {
|
||||
if (testsInFile.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
|
||||
for (const title of testsInFile) {
|
||||
this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine('');
|
||||
}
|
||||
}
|
||||
|
||||
endRun() { // eslint-disable-line complexity
|
||||
this.spinner.stop();
|
||||
cliCursor.show(this.reportStream);
|
||||
|
||||
if (!this.stats) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.matching && this.stats.selectedTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
let firstLinePostfix = this.watching ?
|
||||
' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
|
||||
'';
|
||||
|
||||
if (this.filesWithMissingAvaImports.size > 0) {
|
||||
for (const testFile of this.filesWithMissingAvaImports) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithoutDeclaredTests.size > 0) {
|
||||
for (const testFile of this.filesWithoutDeclaredTests) {
|
||||
if (!this.filesWithMissingAvaImports.has(testFile)) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.lineNumberErrors.length > 0) {
|
||||
for (const evt of this.lineNumberErrors) {
|
||||
this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithoutMatchedLineNumbers.size > 0) {
|
||||
for (const testFile of this.filesWithoutMatchedLineNumbers) {
|
||||
if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithMissingAvaImports.size > 0 || this.filesWithoutDeclaredTests.size > 0 || this.filesWithoutMatchedLineNumbers.size > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
|
||||
}
|
||||
|
||||
if (this.stats.unhandledRejections > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
|
||||
}
|
||||
|
||||
if (this.stats.uncaughtExceptions > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
|
||||
}
|
||||
|
||||
if (this.previousFailures > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
for (const evt of this.knownFailures) {
|
||||
this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
|
||||
}
|
||||
}
|
||||
|
||||
const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);
|
||||
|
||||
if (this.failures.length > 0) {
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.internalErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
const last = this.failures[this.failures.length - 1];
|
||||
for (const evt of this.failures) {
|
||||
this.writeFailure(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.internalErrors.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.internalErrors[this.internalErrors.length - 1];
|
||||
for (const evt of this.internalErrors) {
|
||||
if (evt.testFile) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
|
||||
} else {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.stack(evt.err.summary));
|
||||
this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.uncaughtExceptions.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.unhandledRejections.length > 0;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
|
||||
for (const evt of this.uncaughtExceptions) {
|
||||
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.unhandledRejections.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0 && this.uncaughtExceptions.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.unhandledRejections[this.unhandledRejections.length - 1];
|
||||
for (const evt of this.unhandledRejections) {
|
||||
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldWriteFailFastDisclaimer) {
|
||||
let remaining = '';
|
||||
if (this.stats.remainingTests > 0) {
|
||||
remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
remaining += ', as well as ';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
|
||||
remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
|
||||
if (this.stats.remainingTests === 0) {
|
||||
remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
|
||||
}
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
module.exports = MiniReporter;
|
||||
21
node_modules/ava/lib/reporters/prefix-title.js
generated
vendored
Normal file
21
node_modules/ava/lib/reporters/prefix-title.js
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const figures = require('figures');
|
||||
const chalk = require('../chalk').get();
|
||||
|
||||
const SEPERATOR = ' ' + chalk.gray.dim(figures.pointerSmall) + ' ';
|
||||
|
||||
module.exports = (base, file, title) => {
|
||||
const prefix = file
|
||||
// Only replace base if it is found at the start of the path
|
||||
.replace(base, (match, offset) => offset === 0 ? '' : match)
|
||||
.replace(/\.spec/, '')
|
||||
.replace(/\.test/, '')
|
||||
.replace(/test-/g, '')
|
||||
.replace(/\.js$/, '')
|
||||
.split(path.sep)
|
||||
.filter(p => p !== '__tests__')
|
||||
.join(SEPERATOR);
|
||||
|
||||
return prefix + SEPERATOR + title;
|
||||
};
|
||||
209
node_modules/ava/lib/reporters/tap.js
generated
vendored
Normal file
209
node_modules/ava/lib/reporters/tap.js
generated
vendored
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const plur = require('plur');
|
||||
const stripAnsi = require('strip-ansi');
|
||||
const supertap = require('supertap');
|
||||
const indentString = require('indent-string');
|
||||
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
|
||||
function dumpError(error) {
|
||||
const object = {...error.object};
|
||||
if (error.name) {
|
||||
object.name = error.name;
|
||||
}
|
||||
|
||||
if (error.message) {
|
||||
object.message = error.message;
|
||||
}
|
||||
|
||||
if (error.avaAssertionError) {
|
||||
if (error.assertion) {
|
||||
object.assertion = error.assertion;
|
||||
}
|
||||
|
||||
if (error.operator) {
|
||||
object.operator = error.operator;
|
||||
}
|
||||
|
||||
if (error.values.length > 0) {
|
||||
object.values = error.values.reduce((acc, value) => {
|
||||
acc[value.label] = stripAnsi(value.formatted);
|
||||
return acc;
|
||||
}, {});
|
||||
}
|
||||
}
|
||||
|
||||
if (error.nonErrorObject) {
|
||||
object.message = 'Non-error object';
|
||||
object.formatted = stripAnsi(error.formatted);
|
||||
}
|
||||
|
||||
if (error.stack) {
|
||||
object.at = error.shouldBeautifyStack ? beautifyStack(error.stack).join('\n') : error.stack;
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
class TapReporter {
|
||||
constructor(options) {
|
||||
this.i = 0;
|
||||
|
||||
this.stdStream = options.stdStream;
|
||||
this.reportStream = options.reportStream;
|
||||
|
||||
this.crashCount = 0;
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
this.relativeFile = file => path.relative(options.projectDir, file);
|
||||
this.stats = null;
|
||||
}
|
||||
|
||||
startRun(plan) {
|
||||
if (plan.files.length > 1) {
|
||||
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
|
||||
}
|
||||
|
||||
plan.status.on('stateChange', evt => this.consumeStateChange(evt));
|
||||
|
||||
this.reportStream.write(supertap.start() + os.EOL);
|
||||
}
|
||||
|
||||
endRun() {
|
||||
if (this.stats) {
|
||||
this.reportStream.write(supertap.finish({
|
||||
crashed: this.crashCount,
|
||||
failed: this.stats.failedTests + this.stats.remainingTests,
|
||||
passed: this.stats.passedTests + this.stats.passedKnownFailingTests,
|
||||
skipped: this.stats.skippedTests,
|
||||
todo: this.stats.todoTests
|
||||
}) + os.EOL);
|
||||
|
||||
if (this.stats.parallelRuns) {
|
||||
const {currentFileCount, currentIndex, totalRuns} = this.stats.parallelRuns;
|
||||
this.reportStream.write(`# Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}` + os.EOL + os.EOL);
|
||||
}
|
||||
} else {
|
||||
this.reportStream.write(supertap.finish({
|
||||
crashed: this.crashCount,
|
||||
failed: 0,
|
||||
passed: 0,
|
||||
skipped: 0,
|
||||
todo: 0
|
||||
}) + os.EOL);
|
||||
}
|
||||
}
|
||||
|
||||
writeTest(evt, flags) {
|
||||
this.reportStream.write(supertap.test(this.prefixTitle(evt.testFile, evt.title), {
|
||||
comment: evt.logs,
|
||||
error: evt.err ? dumpError(evt.err) : null,
|
||||
index: ++this.i,
|
||||
passed: flags.passed,
|
||||
skip: flags.skip,
|
||||
todo: flags.todo
|
||||
}) + os.EOL);
|
||||
}
|
||||
|
||||
writeCrash(evt, title) {
|
||||
this.crashCount++;
|
||||
this.reportStream.write(supertap.test(title || evt.err.summary || evt.type, {
|
||||
comment: evt.logs,
|
||||
error: evt.err ? dumpError(evt.err) : null,
|
||||
index: ++this.i,
|
||||
passed: false,
|
||||
skip: false,
|
||||
todo: false
|
||||
}) + os.EOL);
|
||||
}
|
||||
|
||||
writeComment(evt, {title = this.prefixTitle(evt.testFile, evt.title)}) {
|
||||
this.reportStream.write(`# ${stripAnsi(title)}${os.EOL}`);
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(log, 4).replace(/^ {4}/, ' # ');
|
||||
this.reportStream.write(`${logLines}${os.EOL}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Routes run `stateChange` events to the appropriate TAP output helper.
// Most events map 1:1 onto writeTest/writeCrash/writeComment; unknown event
// types are deliberately ignored.
consumeStateChange(evt) { // eslint-disable-line complexity
	// Per-file stats only exist once a 'stats' event has been consumed and the
	// event carries a test file.
	const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;

	switch (evt.type) {
		case 'declared-test':
			// Ignore
			break;
		case 'hook-failed':
			this.writeTest(evt, {passed: false, todo: false, skip: false});
			break;
		case 'hook-finished':
			this.writeComment(evt, {});
			break;
		case 'internal-error':
			this.writeCrash(evt);
			break;
		case 'missing-ava-import':
			// Remember the file so later worker events for it are suppressed.
			this.filesWithMissingAvaImports.add(evt.testFile);
			this.writeCrash(evt, `No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`);
			break;
		case 'selected-test':
			// Skipped tests count as passing; todo tests as (expected) failures.
			if (evt.skip) {
				this.writeTest(evt, {passed: true, todo: false, skip: true});
			} else if (evt.todo) {
				this.writeTest(evt, {passed: false, todo: true, skip: false});
			}

			break;
		case 'stats':
			this.stats = evt.stats;
			break;
		case 'test-failed':
			this.writeTest(evt, {passed: false, todo: false, skip: false});
			break;
		case 'test-passed':
			this.writeTest(evt, {passed: true, todo: false, skip: false});
			break;
		case 'timeout':
			this.writeCrash(evt, `Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
			break;
		case 'uncaught-exception':
			this.writeCrash(evt);
			break;
		case 'unhandled-rejection':
			this.writeCrash(evt);
			break;
		case 'worker-failed':
			// Suppressed when the file already crashed with a missing ava import.
			if (!this.filesWithMissingAvaImports.has(evt.testFile)) {
				if (evt.nonZeroExitCode) {
					this.writeCrash(evt, `${this.relativeFile(evt.testFile)} exited with a non-zero exit code: ${evt.nonZeroExitCode}`);
				} else {
					this.writeCrash(evt, `${this.relativeFile(evt.testFile)} exited due to ${evt.signal}`);
				}
			}

			break;
		case 'worker-finished':
			if (!evt.forcedExit && !this.filesWithMissingAvaImports.has(evt.testFile)) {
				if (fileStats.declaredTests === 0) {
					this.writeCrash(evt, `No tests found in ${this.relativeFile(evt.testFile)}`);
				} else if (!this.failFastEnabled && fileStats.remainingTests > 0) {
					// Tests that never ran (e.g. the worker exited early).
					this.writeComment(evt, {title: `${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(evt.testFile)}`});
				}
			}

			break;
		case 'worker-stderr':
		case 'worker-stdout':
			// Worker output is forwarded verbatim to the std stream, not the
			// TAP report stream.
			this.stdStream.write(evt.chunk);
			break;
		default:
			break;
	}
}
|
||||
}
|
||||
module.exports = TapReporter;
|
||||
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
Normal file
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
Normal file
|
|
@ -0,0 +1,463 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const plur = require('plur');
|
||||
const prettyMs = require('pretty-ms');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
/**
 * Thin writable wrapper around a destination stream that adds line-oriented
 * helpers: two-space indentation for non-empty lines and tracking of whether
 * the most recent line was blank (so callers can avoid duplicate blank lines).
 */
class LineWriter extends stream.Writable {
	/**
	 * @param {stream.Writable} dest - Stream that ultimately receives all output.
	 */
	constructor(dest) {
		super();

		this.dest = dest;
		// Fall back to 80 columns when the destination is not a TTY.
		this.columns = dest.columns || 80;
		this.lastLineIsEmpty = false;
	}

	// Writable implementation: forward every chunk straight to the destination.
	_write(chunk, encoding, callback) {
		this.dest.write(chunk);
		callback();
	}

	/**
	 * Writes `string` indented by two spaces followed by a newline, or just a
	 * blank line when `string` is falsy. Updates the blank-line tracker.
	 */
	writeLine(string) {
		if (!string) {
			this.lastLineIsEmpty = true;
			this.write(os.EOL);
			return;
		}

		this.lastLineIsEmpty = false;
		this.write(indentString(string, 2) + os.EOL);
	}

	// Writes a blank line unless the previous line was already blank.
	ensureEmptyLine() {
		if (this.lastLineIsEmpty) {
			return;
		}

		this.writeLine();
	}
}
|
||||
|
||||
/**
 * Default human-readable reporter: prints a progress line per test as events
 * arrive, then an end-of-run summary with full failure details.
 *
 * Every claim below is grounded in this class's own code; helpers such as
 * `whileCorked`, `prefixTitle`, `beautifyStack` and the `colors` palette are
 * defined in sibling modules.
 */
class VerboseReporter {
	constructor(options) {
		// Tests slower than this many milliseconds get their duration printed.
		this.durationThreshold = options.durationThreshold || 100;
		this.reportStream = options.reportStream;
		this.stdStream = options.stdStream;
		this.watching = options.watching;

		this.lineWriter = new LineWriter(this.reportStream);
		// Cork the report stream for the duration of each handler so each
		// event's output is flushed as one chunk.
		this.consumeStateChange = whileCorked(this.reportStream, this.consumeStateChange);
		this.endRun = whileCorked(this.reportStream, this.endRun);
		this.relativeFile = file => path.relative(options.projectDir, file);

		this.reset();
	}

	// Clears all per-run state. Called from the constructor and at the start
	// of every (watch-mode) run.
	reset() {
		if (this.removePreviousListener) {
			this.removePreviousListener();
		}

		this.failFastEnabled = false;
		this.failures = [];
		this.filesWithMissingAvaImports = new Set();
		this.knownFailures = [];
		this.runningTestFiles = new Map();
		this.lastLineIsEmpty = false;
		this.matching = false;
		// Identity by default; startRun swaps in a file-prefixing version when
		// multiple files are involved.
		this.prefixTitle = (testFile, title) => title;
		this.previousFailures = 0;
		this.removePreviousListener = null;
		this.stats = null;
	}

	// Subscribes to the run's state changes and prints the run header.
	startRun(plan) {
		if (plan.bailWithoutReporting) {
			return;
		}

		this.reset();

		this.failFastEnabled = plan.failFastEnabled;
		this.matching = plan.matching;
		this.previousFailures = plan.previousFailures;
		this.emptyParallelRun = plan.status.emptyParallelRun;

		// Prefix titles with the file path when more than one file may report.
		if (this.watching || plan.files.length > 1) {
			this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
		}

		this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));

		// In watch mode, visually separate consecutive runs with a rule.
		if (this.watching && plan.runVector > 1) {
			this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.reportStream.columns || 80)) + os.EOL);
		}

		this.lineWriter.writeLine();
	}

	// Routes run `stateChange` events to the appropriate output helper.
	consumeStateChange(evt) { // eslint-disable-line complexity
		const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;

		switch (evt.type) {
			case 'hook-failed':
				// Hook failures are collected for the end-of-run detail section.
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'internal-error':
				if (evt.testFile) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
				} else {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
				}

				this.lineWriter.writeLine(colors.stack(evt.err.summary));
				this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
				this.lineWriter.writeLine();
				this.lineWriter.writeLine();
				break;
			case 'line-number-selection-error':
				this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
				break;
			case 'missing-ava-import':
				// Remember the file so later worker events for it are suppressed.
				this.filesWithMissingAvaImports.add(evt.testFile);
				this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
				break;
			case 'hook-finished':
				// Hooks are only mentioned when they produced logs.
				if (evt.logs.length > 0) {
					this.lineWriter.writeLine(` ${this.prefixTitle(evt.testFile, evt.title)}`);
					this.writeLogs(evt);
				}

				break;
			case 'selected-test':
				if (evt.skip) {
					this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
				} else if (evt.todo) {
					this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
				}

				break;
			case 'stats':
				this.stats = evt.stats;
				break;
			case 'test-failed':
				this.failures.push(evt);
				this.writeTestSummary(evt);
				break;
			case 'test-passed':
				// Known failures that unexpectedly pass are listed in the summary.
				if (evt.knownFailing) {
					this.knownFailures.push(evt);
				}

				this.writeTestSummary(evt);
				break;
			case 'timeout':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'interrupt':
				this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
				this.lineWriter.writeLine('');
				this.writePendingTests(evt);
				break;
			case 'uncaught-exception':
				this.lineWriter.ensureEmptyLine();
				this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				this.lineWriter.writeLine();
				break;
			case 'unhandled-rejection':
				this.lineWriter.ensureEmptyLine();
				this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(evt);
				this.lineWriter.writeLine();
				break;
			case 'worker-failed':
				if (!this.filesWithMissingAvaImports.has(evt.testFile)) {
					if (evt.nonZeroExitCode) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited with a non-zero exit code: ${evt.nonZeroExitCode}`));
					} else {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited due to ${evt.signal}`));
					}
				}

				break;
			case 'worker-finished':
				if (!evt.forcedExit && !this.filesWithMissingAvaImports.has(evt.testFile)) {
					if (fileStats.declaredTests === 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
					} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
					} else if (!this.failFastEnabled && fileStats.remainingTests > 0) {
						this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(evt.testFile)}`));
					}
				}

				break;
			case 'worker-stderr':
			case 'worker-stdout':
				this.stdStream.write(evt.chunk);
				// If the chunk does not end with a linebreak, *forcibly* write one to
				// ensure it remains visible in the TTY.
				// Tests cannot assume their standard output is not interrupted. Indeed
				// we multiplex stdout and stderr into a single stream. However as
				// long as stdStream is different from reportStream users can read
				// their original output by redirecting the streams.
				if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
					this.reportStream.write(os.EOL);
				}

				break;
			default:
				break;
		}
	}

	// Prints the full detail for one error: source excerpt, assertion diff or
	// summary, and a (possibly beautified) stack trace.
	writeErr(evt) {
		// ts-node errors carry their own formatted diagnostic text.
		if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
			this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
			return;
		}

		if (evt.err.source) {
			this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
			const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.reportStream.columns - 2});
			if (excerpt) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(excerpt);
			}
		}

		if (evt.err.avaAssertionError) {
			const result = formatSerializedError(evt.err);
			if (result.printMessage) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(evt.err.message);
			}

			if (result.formatted) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(result.formatted);
			}

			// Hints for common assertion misuse, when applicable.
			const message = improperUsageMessages.forError(evt.err);
			if (message) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine(message);
			}
		} else if (evt.err.nonErrorObject) {
			this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
		} else {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(evt.err.summary);
		}

		const formatted = this.formatErrorStack(evt.err);
		if (formatted.length > 0) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(formatted.join('\n'));
		}
	}

	// Returns the error's stack as an array of display lines. Lines that match
	// Node-internal frames are dimmed when the stack is beautified.
	formatErrorStack(error) {
		if (!error.stack) {
			return [];
		}

		if (error.shouldBeautifyStack) {
			return beautifyStack(error.stack).map(line => {
				if (nodeInternals.some(internal => internal.test(line))) {
					return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
				}

				return colors.errorStack(`${figures.pointerSmall} ${line}`);
			});
		}

		return [error.stack];
	}

	// Lists, per file, the tests that had not finished when the run was cut
	// short (timeout or SIGINT).
	writePendingTests(evt) {
		for (const [file, testsInFile] of evt.pendingTests) {
			if (testsInFile.size === 0) {
				continue;
			}

			this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
			for (const title of testsInFile) {
				this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
			}

			this.lineWriter.writeLine('');
		}
	}

	// Prints each captured `t.log()` entry, indented and marked with ℹ.
	writeLogs(evt) {
		if (evt.logs) {
			for (const log of evt.logs) {
				const logLines = indentString(colors.log(log), 4);
				const logLinesWithLeadingFigure = logLines.replace(
					/^ {4}/,
					` ${colors.information(figures.info)} `
				);
				this.lineWriter.writeLine(logLinesWithLeadingFigure);
			}
		}
	}

	// Prints the one-line progress entry for a finished test or failed hook.
	writeTestSummary(evt) {
		if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
			this.lineWriter.writeLine(`${colors.error(figures.cross)} ${this.prefixTitle(evt.testFile, evt.title)} ${colors.error(evt.err.message)}`);
		} else if (evt.knownFailing) {
			// A known failure that failed as expected: tick, but in error color.
			this.lineWriter.writeLine(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
		} else {
			// Only slow tests get their duration appended.
			const duration = evt.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(evt.duration) + ')') : '';

			this.lineWriter.writeLine(`${colors.pass(figures.tick)} ${this.prefixTitle(evt.testFile, evt.title)}${duration}`);
		}

		this.writeLogs(evt);
	}

	// Prints the full detail block for one collected failure.
	writeFailure(evt) {
		this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
		this.writeLogs(evt);
		this.lineWriter.writeLine();
		this.writeErr(evt);
	}

	// Prints the end-of-run summary: counts, known failures, failure details
	// and — when --fail-fast cut the run short — what was skipped.
	endRun() { // eslint-disable-line complexity
		if (this.emptyParallelRun) {
			this.lineWriter.writeLine('No files tested in this parallel run');
			this.lineWriter.writeLine();
			return;
		}

		// No 'stats' event was ever received: nothing ran.
		if (!this.stats) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
			this.lineWriter.writeLine();
			return;
		}

		if (this.matching && this.stats.selectedTests === 0) {
			this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
			this.lineWriter.writeLine();
			return;
		}

		this.lineWriter.writeLine();

		if (this.stats.parallelRuns) {
			const {currentFileCount, currentIndex, totalRuns} = this.stats.parallelRuns;
			this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
			this.lineWriter.writeLine();
		}

		// In watch mode the first summary line carries a timestamp; it is
		// cleared once used so only one line gets it.
		let firstLinePostfix = this.watching ?
			' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
			'';

		if (this.stats.failedHooks > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.failedTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
			this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
			firstLinePostfix = '';
		}

		if (this.stats.passedKnownFailingTests > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
		}

		if (this.stats.skippedTests > 0) {
			this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
		}

		if (this.stats.todoTests > 0) {
			this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
		}

		if (this.stats.unhandledRejections > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
		}

		if (this.stats.uncaughtExceptions > 0) {
			this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
		}

		if (this.previousFailures > 0) {
			this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
		}

		// List the titles of known failures that unexpectedly passed.
		if (this.stats.passedKnownFailingTests > 0) {
			this.lineWriter.writeLine();
			for (const evt of this.knownFailures) {
				this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
			}
		}

		const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);

		if (this.failures.length > 0) {
			this.lineWriter.writeLine();

			const lastFailure = this.failures[this.failures.length - 1];
			for (const evt of this.failures) {
				this.writeFailure(evt);
				// Separate consecutive failure blocks (and the disclaimer) with
				// blank lines; the very last block is only followed when the
				// disclaimer will be printed.
				if (evt !== lastFailure || shouldWriteFailFastDisclaimer) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}
			}
		}

		if (shouldWriteFailFastDisclaimer) {
			// Compose "At least N tests were skipped, as well as M test files"
			// from whichever parts apply.
			let remaining = '';
			if (this.stats.remainingTests > 0) {
				remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
				if (this.stats.files > this.stats.finishedWorkers) {
					remaining += ', as well as ';
				}
			}

			if (this.stats.files > this.stats.finishedWorkers) {
				const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
				remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
				if (this.stats.remainingTests === 0) {
					remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
				}
			}

			this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
		}

		this.lineWriter.writeLine();
	}
}
|
||||
|
||||
module.exports = VerboseReporter;
|
||||
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
Normal file
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
'use strict';
|
||||
/**
 * Returns a wrapper around `callback` that corks `target` before each call
 * and uncorks it afterwards — even when the callback throws — so everything
 * the callback writes is flushed as a single chunk.
 *
 * The wrapper is a regular function (not an arrow) so the caller's `this`
 * and arguments are forwarded unchanged.
 */
function whileCorked(target, callback) {
	return function (...callArgs) {
		target.cork();

		try {
			callback.apply(this, callArgs);
		} finally {
			// Always uncork, or the stream would buffer forever after a throw.
			target.uncork();
		}
	};
}
|
||||
|
||||
module.exports = whileCorked;
|
||||
194
node_modules/ava/lib/run-status.js
generated
vendored
Normal file
194
node_modules/ava/lib/run-status.js
generated
vendored
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
'use strict';
|
||||
const Emittery = require('emittery');
|
||||
const cloneDeep = require('lodash/cloneDeep');
|
||||
|
||||
/**
 * Tracks aggregate and per-file statistics for a test run by observing
 * worker state-change events, and re-emits every event (preceded by an
 * up-to-date 'stats' snapshot) on its own 'stateChange' channel.
 */
class RunStatus extends Emittery {
	/**
	 * @param {number} files - Number of test files in this run.
	 * @param {Object|undefined} parallelRuns - CI parallel-run descriptor
	 *   ({currentFileCount, currentIndex, totalRuns}); undefined when not
	 *   running as one job of a parallel set.
	 */
	constructor(files, parallelRuns) {
		super();

		// Map of testFile -> Set of titles of tests that were selected but
		// have not yet passed or failed.
		this.pendingTests = new Map();

		// True when this parallel job was assigned none of the files even
		// though the run as a whole has files.
		this.emptyParallelRun = parallelRuns &&
			parallelRuns.currentFileCount === 0 &&
			parallelRuns.totalRuns > 1 &&
			files > 0;

		this.stats = {
			byFile: new Map(),
			declaredTests: 0,
			failedHooks: 0,
			failedTests: 0,
			failedWorkers: 0,
			files,
			parallelRuns,
			finishedWorkers: 0,
			internalErrors: 0,
			remainingTests: 0,
			passedKnownFailingTests: 0,
			passedTests: 0,
			selectedTests: 0,
			skippedTests: 0,
			timeouts: 0,
			todoTests: 0,
			uncaughtExceptions: 0,
			unhandledRejections: 0
		};
	}

	// Registers a worker: seeds the per-file stats (callers may pre-set some
	// fields via `stats`, e.g. selectingLines) and subscribes to its events.
	observeWorker(worker, testFile, stats) {
		this.stats.byFile.set(testFile, {
			declaredTests: 0,
			failedHooks: 0,
			failedTests: 0,
			internalErrors: 0,
			remainingTests: 0,
			passedKnownFailingTests: 0,
			passedTests: 0,
			selectedTests: 0,
			selectingLines: false,
			skippedTests: 0,
			todoTests: 0,
			uncaughtExceptions: 0,
			unhandledRejections: 0,
			...stats
		});

		this.pendingTests.set(testFile, new Set());
		worker.onStateChange(data => this.emitStateChange(data));
	}

	// Updates the aggregate and per-file counters for `event`, then emits a
	// cloned 'stats' snapshot (if anything changed) followed by the event
	// itself.
	emitStateChange(event) {
		const {stats} = this;
		const fileStats = stats.byFile.get(event.testFile);

		let changedStats = true;
		switch (event.type) {
			case 'declared-test':
				stats.declaredTests++;
				fileStats.declaredTests++;
				break;
			case 'hook-failed':
				stats.failedHooks++;
				fileStats.failedHooks++;
				break;
			case 'internal-error':
				stats.internalErrors++;
				// Internal errors may occur outside any test file.
				if (event.testFile) {
					fileStats.internalErrors++;
				}

				break;
			case 'selected-test':
				stats.selectedTests++;
				fileStats.selectedTests++;
				if (event.skip) {
					stats.skippedTests++;
					fileStats.skippedTests++;
				} else if (event.todo) {
					stats.todoTests++;
					fileStats.todoTests++;
				} else {
					// Only tests that will actually run count as remaining/pending.
					stats.remainingTests++;
					fileStats.remainingTests++;
					this.addPendingTest(event);
				}

				break;
			case 'test-failed':
				stats.failedTests++;
				fileStats.failedTests++;
				stats.remainingTests--;
				fileStats.remainingTests--;
				this.removePendingTest(event);
				break;
			case 'test-passed':
				if (event.knownFailing) {
					stats.passedKnownFailingTests++;
					fileStats.passedKnownFailingTests++;
				} else {
					stats.passedTests++;
					fileStats.passedTests++;
				}

				stats.remainingTests--;
				fileStats.remainingTests--;
				this.removePendingTest(event);
				break;
			case 'timeout':
				// Hand the pending-test map to the event (reporters list it)
				// and start a fresh one.
				event.pendingTests = this.pendingTests;
				this.pendingTests = new Map();
				stats.timeouts++;
				break;
			case 'interrupt':
				event.pendingTests = this.pendingTests;
				this.pendingTests = new Map();
				break;
			case 'uncaught-exception':
				stats.uncaughtExceptions++;
				fileStats.uncaughtExceptions++;
				break;
			case 'unhandled-rejection':
				stats.unhandledRejections++;
				fileStats.unhandledRejections++;
				break;
			case 'worker-failed':
				stats.failedWorkers++;
				break;
			case 'worker-finished':
				stats.finishedWorkers++;
				break;
			default:
				changedStats = false;
				break;
		}

		if (changedStats) {
			// Clone so listeners cannot mutate (or observe later mutation of)
			// the live counters.
			this.emit('stateChange', {type: 'stats', stats: cloneDeep(stats)});
		}

		this.emit('stateChange', event);
	}

	// Maps the run outcome to a process exit code: 0 for success, 1 when
	// anything failed, nothing was declared/matched, or line-number selection
	// matched no tests.
	suggestExitCode(circumstances) {
		if (this.emptyParallelRun) {
			return 0;
		}

		if (circumstances.matching && this.stats.selectedTests === 0) {
			return 1;
		}

		if (
			this.stats.declaredTests === 0 ||
			this.stats.internalErrors > 0 ||
			this.stats.failedHooks > 0 ||
			this.stats.failedTests > 0 ||
			this.stats.failedWorkers > 0 ||
			this.stats.timeouts > 0 ||
			this.stats.uncaughtExceptions > 0 ||
			this.stats.unhandledRejections > 0
		) {
			return 1;
		}

		if ([...this.stats.byFile.values()].some(stats => stats.selectingLines && stats.selectedTests === 0)) {
			return 1;
		}

		return 0;
	}

	// Records a selected test title as pending for its file (no-op for files
	// that were never observed).
	addPendingTest(event) {
		if (this.pendingTests.has(event.testFile)) {
			this.pendingTests.get(event.testFile).add(event.title);
		}
	}

	// Removes a finished test title from its file's pending set.
	removePendingTest(event) {
		if (this.pendingTests.has(event.testFile)) {
			this.pendingTests.get(event.testFile).delete(event.title);
		}
	}
}
|
||||
|
||||
module.exports = RunStatus;
|
||||
514
node_modules/ava/lib/runner.js
generated
vendored
Normal file
514
node_modules/ava/lib/runner.js
generated
vendored
Normal file
|
|
@ -0,0 +1,514 @@
|
|||
'use strict';
|
||||
const Emittery = require('emittery');
|
||||
const matcher = require('matcher');
|
||||
const ContextRef = require('./context-ref');
|
||||
const createChain = require('./create-chain');
|
||||
const parseTestArgs = require('./parse-test-args');
|
||||
const snapshotManager = require('./snapshot-manager');
|
||||
const serializeError = require('./serialize-error');
|
||||
const Runnable = require('./test');
|
||||
|
||||
class Runner extends Emittery {
|
||||
constructor(options = {}) {
|
||||
super();
|
||||
|
||||
this.experiments = options.experiments || {};
|
||||
this.failFast = options.failFast === true;
|
||||
this.failWithoutAssertions = options.failWithoutAssertions !== false;
|
||||
this.file = options.file;
|
||||
this.checkSelectedByLineNumbers = options.checkSelectedByLineNumbers;
|
||||
this.match = options.match || [];
|
||||
this.powerAssert = undefined; // Assigned later.
|
||||
this.projectDir = options.projectDir;
|
||||
this.recordNewSnapshots = options.recordNewSnapshots === true;
|
||||
this.runOnlyExclusive = options.runOnlyExclusive === true;
|
||||
this.serial = options.serial === true;
|
||||
this.snapshotDir = options.snapshotDir;
|
||||
this.updateSnapshots = options.updateSnapshots;
|
||||
|
||||
this.activeRunnables = new Set();
|
||||
this.boundCompareTestSnapshot = this.compareTestSnapshot.bind(this);
|
||||
this.interrupted = false;
|
||||
this.snapshots = null;
|
||||
this.tasks = {
|
||||
after: [],
|
||||
afterAlways: [],
|
||||
afterEach: [],
|
||||
afterEachAlways: [],
|
||||
before: [],
|
||||
beforeEach: [],
|
||||
concurrent: [],
|
||||
serial: [],
|
||||
todo: []
|
||||
};
|
||||
|
||||
const uniqueTestTitles = new Set();
|
||||
this.registerUniqueTitle = title => {
|
||||
if (uniqueTestTitles.has(title)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uniqueTestTitles.add(title);
|
||||
return true;
|
||||
};
|
||||
|
||||
let hasStarted = false;
|
||||
let scheduledStart = false;
|
||||
const meta = Object.freeze({
|
||||
file: options.file,
|
||||
get snapshotDirectory() {
|
||||
const {file, snapshotDir: fixedLocation, projectDir} = options;
|
||||
return snapshotManager.determineSnapshotDir({file, fixedLocation, projectDir});
|
||||
}
|
||||
});
|
||||
this.chain = createChain((metadata, testArgs) => { // eslint-disable-line complexity
|
||||
if (hasStarted) {
|
||||
throw new Error('All tests and hooks must be declared synchronously in your test file, and cannot be nested within other tests or hooks.');
|
||||
}
|
||||
|
||||
if (!scheduledStart) {
|
||||
scheduledStart = true;
|
||||
process.nextTick(() => {
|
||||
hasStarted = true;
|
||||
this.start();
|
||||
});
|
||||
}
|
||||
|
||||
const {args, buildTitle, implementations, rawTitle} = parseTestArgs(testArgs);
|
||||
|
||||
if (this.checkSelectedByLineNumbers) {
|
||||
metadata.selected = this.checkSelectedByLineNumbers();
|
||||
}
|
||||
|
||||
if (metadata.todo) {
|
||||
if (implementations.length > 0) {
|
||||
throw new TypeError('`todo` tests are not allowed to have an implementation. Use `test.skip()` for tests with an implementation.');
|
||||
}
|
||||
|
||||
if (!rawTitle) { // Either undefined or a string.
|
||||
throw new TypeError('`todo` tests require a title');
|
||||
}
|
||||
|
||||
if (!this.registerUniqueTitle(rawTitle)) {
|
||||
throw new Error(`Duplicate test title: ${rawTitle}`);
|
||||
}
|
||||
|
||||
if (this.match.length > 0) {
|
||||
// --match selects TODO tests.
|
||||
if (matcher([rawTitle], this.match).length === 1) {
|
||||
metadata.exclusive = true;
|
||||
this.runOnlyExclusive = true;
|
||||
}
|
||||
}
|
||||
|
||||
this.tasks.todo.push({title: rawTitle, metadata});
|
||||
this.emit('stateChange', {
|
||||
type: 'declared-test',
|
||||
title: rawTitle,
|
||||
knownFailing: false,
|
||||
todo: true
|
||||
});
|
||||
} else {
|
||||
if (implementations.length === 0) {
|
||||
throw new TypeError('Expected an implementation. Use `test.todo()` for tests without an implementation.');
|
||||
}
|
||||
|
||||
for (const implementation of implementations) {
|
||||
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
|
||||
|
||||
if (isSet && !isValid) {
|
||||
throw new TypeError('Test & hook titles must be strings');
|
||||
}
|
||||
|
||||
if (isEmpty) {
|
||||
if (metadata.type === 'test') {
|
||||
throw new TypeError('Tests must have a title');
|
||||
} else if (metadata.always) {
|
||||
title = `${metadata.type}.always hook`;
|
||||
} else {
|
||||
title = `${metadata.type} hook`;
|
||||
}
|
||||
}
|
||||
|
||||
if (metadata.type === 'test' && !this.registerUniqueTitle(title)) {
|
||||
throw new Error(`Duplicate test title: ${title}`);
|
||||
}
|
||||
|
||||
const task = {
|
||||
title,
|
||||
implementation,
|
||||
args,
|
||||
metadata: {...metadata}
|
||||
};
|
||||
|
||||
if (metadata.type === 'test') {
|
||||
if (this.match.length > 0) {
|
||||
// --match overrides .only()
|
||||
task.metadata.exclusive = matcher([title], this.match).length === 1;
|
||||
}
|
||||
|
||||
if (task.metadata.exclusive) {
|
||||
this.runOnlyExclusive = true;
|
||||
}
|
||||
|
||||
this.tasks[metadata.serial ? 'serial' : 'concurrent'].push(task);
|
||||
this.emit('stateChange', {
|
||||
type: 'declared-test',
|
||||
title,
|
||||
knownFailing: metadata.failing,
|
||||
todo: false
|
||||
});
|
||||
} else if (!metadata.skipped) {
|
||||
this.tasks[metadata.type + (metadata.always ? 'Always' : '')].push(task);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, {
|
||||
serial: false,
|
||||
exclusive: false,
|
||||
skipped: false,
|
||||
todo: false,
|
||||
failing: false,
|
||||
callback: false,
|
||||
inline: false, // Set for attempt metadata created by `t.try()`
|
||||
always: false
|
||||
}, meta);
|
||||
}
|
||||
|
||||
compareTestSnapshot(options) {
|
||||
if (!this.snapshots) {
|
||||
this.snapshots = snapshotManager.load({
|
||||
file: this.file,
|
||||
fixedLocation: this.snapshotDir,
|
||||
projectDir: this.projectDir,
|
||||
recordNewSnapshots: this.recordNewSnapshots,
|
||||
updating: this.updateSnapshots
|
||||
});
|
||||
this.emit('dependency', this.snapshots.snapPath);
|
||||
}
|
||||
|
||||
return this.snapshots.compare(options);
|
||||
}
|
||||
|
||||
	// Persists any newly recorded snapshots. Returns the file paths that were
	// written (so callers can report them), or null when no snapshot state was
	// loaded or nothing changed.
	saveSnapshotState() {
		if (this.snapshots) {
			return this.snapshots.save();
		}

		if (this.updateSnapshots) {
			// TODO: There may be unused snapshot files if no test caused the
			// snapshots to be loaded. Prune them. But not if tests (including hooks!)
			// were skipped. Perhaps emit a warning if this occurs?
		}

		return null;
	}
|
||||
|
||||
	// Tracks a runnable (test or hook) as currently executing, so leaked
	// errors and inactivity can be attributed to it.
	onRun(runnable) {
		this.activeRunnables.add(runnable);
	}
|
||||
|
||||
	// Removes a runnable from the active set once it has finished running.
	onRunComplete(runnable) {
		this.activeRunnables.delete(runnable);
	}
|
||||
|
||||
	// Offers an uncaught ("leaked") error to each active runnable in turn.
	// Returns true as soon as one of them claims the error, false if none did.
	attributeLeakedError(err) {
		for (const runnable of this.activeRunnables) {
			if (runnable.attributeLeakedError(err)) {
				return true;
			}
		}

		return false;
	}
|
||||
|
||||
	// Called when the process is about to exit while runnables are still
	// pending (e.g. a test never resolved); forces each of them to finish.
	beforeExitHandler() {
		for (const runnable of this.activeRunnables) {
			runnable.finishDueToInactivity();
		}
	}
|
||||
|
||||
	// Runs a list of runnables, honoring serial/concurrent metadata. Serial
	// runnables are chained one after another; concurrent runnables start as
	// soon as the serial runnables queued before them have completed.
	// Resolves with whether all passed and the individual results, in
	// completion order.
	async runMultiple(runnables) {
		let allPassed = true;
		const storedResults = [];
		const runAndStoreResult = async runnable => {
			const result = await this.runSingle(runnable);
			if (!result.passed) {
				allPassed = false;
			}

			storedResults.push(result);
		};

		let waitForSerial = Promise.resolve();
		await runnables.reduce((previous, runnable) => {
			if (runnable.metadata.serial || this.serial) {
				waitForSerial = previous.then(() => {
					// Serial runnables run as long as there was no previous failure, unless
					// the runnable should always be run.
					return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
				});
				return waitForSerial;
			}

			return Promise.all([
				previous,
				waitForSerial.then(() => {
					// Concurrent runnables are kicked off after the previous serial
					// runnables have completed, as long as there was no previous failure
					// (or if the runnable should always be run). One concurrent runnable's
					// failure does not prevent the next runnable from running.
					return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
				})
			]);
		}, waitForSerial);

		return {allPassed, storedResults};
	}
|
||||
|
||||
	// Runs one runnable while tracking it as active, returning its result.
	async runSingle(runnable) {
		this.onRun(runnable);
		const result = await runnable.run();
		// If run() throws or rejects then the entire test run crashes, so
		// onRunComplete() doesn't *have* to be inside a finally.
		this.onRunComplete(runnable);
		return result;
	}
|
||||
|
||||
async runHooks(tasks, contextRef, titleSuffix, testPassed) {
|
||||
const hooks = tasks.map(task => new Runnable({
|
||||
contextRef,
|
||||
experiments: this.experiments,
|
||||
failWithoutAssertions: false,
|
||||
fn: task.args.length === 0 ?
|
||||
task.implementation :
|
||||
t => task.implementation.apply(null, [t].concat(task.args)),
|
||||
compareTestSnapshot: this.boundCompareTestSnapshot,
|
||||
updateSnapshots: this.updateSnapshots,
|
||||
metadata: task.metadata,
|
||||
powerAssert: this.powerAssert,
|
||||
title: `${task.title}${titleSuffix || ''}`,
|
||||
isHook: true,
|
||||
testPassed
|
||||
}));
|
||||
const outcome = await this.runMultiple(hooks, this.serial);
|
||||
for (const result of outcome.storedResults) {
|
||||
if (result.passed) {
|
||||
this.emit('stateChange', {
|
||||
type: 'hook-finished',
|
||||
title: result.title,
|
||||
duration: result.duration,
|
||||
logs: result.logs
|
||||
});
|
||||
} else {
|
||||
this.emit('stateChange', {
|
||||
type: 'hook-failed',
|
||||
title: result.title,
|
||||
err: serializeError('Hook failure', true, result.error),
|
||||
duration: result.duration,
|
||||
logs: result.logs
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return outcome.allPassed;
|
||||
}
|
||||
|
||||
	// Runs a single test task together with its beforeEach/afterEach hooks.
	// Returns true only if the hooks, the test, and the afterEach.always
	// hooks all passed.
	async runTest(task, contextRef) {
		const hookSuffix = ` for ${task.title}`;
		let hooksOk = await this.runHooks(this.tasks.beforeEach, contextRef, hookSuffix);

		let testOk = false;
		if (hooksOk) {
			// Only run the test if all `beforeEach` hooks passed.
			const test = new Runnable({
				contextRef,
				experiments: this.experiments,
				failWithoutAssertions: this.failWithoutAssertions,
				fn: task.args.length === 0 ?
					task.implementation :
					t => task.implementation.apply(null, [t].concat(task.args)),
				compareTestSnapshot: this.boundCompareTestSnapshot,
				updateSnapshots: this.updateSnapshots,
				metadata: task.metadata,
				powerAssert: this.powerAssert,
				title: task.title,
				registerUniqueTitle: this.registerUniqueTitle
			});

			const result = await this.runSingle(test);
			testOk = result.passed;

			if (testOk) {
				this.emit('stateChange', {
					type: 'test-passed',
					title: result.title,
					duration: result.duration,
					knownFailing: result.metadata.failing,
					logs: result.logs
				});

				// `afterEach` hooks receive the test outcome via `testOk`.
				hooksOk = await this.runHooks(this.tasks.afterEach, contextRef, hookSuffix, testOk);
			} else {
				this.emit('stateChange', {
					type: 'test-failed',
					title: result.title,
					err: serializeError('Test failure', true, result.error, this.file),
					duration: result.duration,
					knownFailing: result.metadata.failing,
					logs: result.logs
				});
				// Don't run `afterEach` hooks if the test failed.
			}
		}

		// `afterEach.always` hooks run regardless of test or hook outcomes.
		const alwaysOk = await this.runHooks(this.tasks.afterEachAlways, contextRef, hookSuffix, testOk);
		return alwaysOk && hooksOk && testOk;
	}
|
||||
|
||||
	// Entry point for running all declared tasks: selects tests (honoring
	// .only, line-number selection, skip and todo), declares them to
	// reporters, then runs `before` hooks, serial tests, concurrent tests and
	// finally the `after`/`after.always` hooks. Emits 'finish' when done or
	// 'error' if the run itself crashes.
	async start() {
		const concurrentTests = [];
		const serialTests = [];
		// Select and announce serial tests.
		for (const task of this.tasks.serial) {
			if (this.runOnlyExclusive && !task.metadata.exclusive) {
				continue;
			}

			if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
				continue;
			}

			this.emit('stateChange', {
				type: 'selected-test',
				title: task.title,
				knownFailing: task.metadata.failing,
				skip: task.metadata.skipped,
				todo: false
			});

			if (!task.metadata.skipped) {
				serialTests.push(task);
			}
		}

		// Select and announce concurrent tests; in `--serial` mode they are
		// folded into the serial queue.
		for (const task of this.tasks.concurrent) {
			if (this.runOnlyExclusive && !task.metadata.exclusive) {
				continue;
			}

			if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
				continue;
			}

			this.emit('stateChange', {
				type: 'selected-test',
				title: task.title,
				knownFailing: task.metadata.failing,
				skip: task.metadata.skipped,
				todo: false
			});

			if (!task.metadata.skipped) {
				if (this.serial) {
					serialTests.push(task);
				} else {
					concurrentTests.push(task);
				}
			}
		}

		// Announce todo tests — they are never run.
		for (const task of this.tasks.todo) {
			if (this.runOnlyExclusive && !task.metadata.exclusive) {
				continue;
			}

			if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
				continue;
			}

			this.emit('stateChange', {
				type: 'selected-test',
				title: task.title,
				knownFailing: false,
				skip: false,
				todo: true
			});
		}

		if (concurrentTests.length === 0 && serialTests.length === 0) {
			this.emit('finish');
			// Don't run any hooks if there are no tests to run.
			return;
		}

		const contextRef = new ContextRef();

		// Note that the hooks and tests always begin running asynchronously.
		const beforePromise = this.runHooks(this.tasks.before, contextRef);
		const serialPromise = beforePromise.then(beforeHooksOk => { // eslint-disable-line promise/prefer-await-to-then
			// Don't run tests if a `before` hook failed.
			if (!beforeHooksOk) {
				return false;
			}

			// Chain serial tests one after another; the accumulator tracks
			// whether the previous test passed.
			return serialTests.reduce(async (previous, task) => {
				const previousOk = await previous;
				// Don't start tests after an interrupt.
				if (this.interrupted) {
					return previousOk;
				}

				// Prevent subsequent tests from running if `failFast` is enabled and
				// the previous test failed.
				if (!previousOk && this.failFast) {
					return false;
				}

				return this.runTest(task, contextRef.copy());
			}, true);
		});
		const concurrentPromise = Promise.all([beforePromise, serialPromise]).then(async ([beforeHooksOk, serialOk]) => { // eslint-disable-line promise/prefer-await-to-then
			// Don't run tests if a `before` hook failed, or if `failFast` is enabled
			// and a previous serial test failed.
			if (!beforeHooksOk || (!serialOk && this.failFast)) {
				return false;
			}

			// Don't start tests after an interrupt.
			if (this.interrupted) {
				return true;
			}

			// If a concurrent test fails, even if `failFast` is enabled it won't
			// stop other concurrent tests from running.
			const allOkays = await Promise.all(concurrentTests.map(task => {
				return this.runTest(task, contextRef.copy());
			}));
			return allOkays.every(ok => ok);
		});

		// Force hung runnables to finish if the event loop drains mid-run.
		const beforeExitHandler = this.beforeExitHandler.bind(this);
		process.on('beforeExit', beforeExitHandler);

		try {
			const ok = await concurrentPromise;
			// Only run `after` hooks if all hooks and tests passed.
			if (ok) {
				await this.runHooks(this.tasks.after, contextRef);
			}

			// Always run `after.always` hooks.
			await this.runHooks(this.tasks.afterAlways, contextRef);
			process.removeListener('beforeExit', beforeExitHandler);
			await this.emit('finish');
		} catch (error) {
			await this.emit('error', error);
		}
	}
|
||||
|
||||
	// Flags the run as interrupted; tests that have not yet started will be
	// skipped (checked in start()), but running tests are not aborted.
	interrupt() {
		this.interrupted = true;
	}
|
||||
}
|
||||
|
||||
module.exports = Runner;
|
||||
173
node_modules/ava/lib/serialize-error.js
generated
vendored
Normal file
173
node_modules/ava/lib/serialize-error.js
generated
vendored
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const cleanYamlObject = require('clean-yaml-object');
|
||||
const concordance = require('concordance');
|
||||
const isError = require('is-error');
|
||||
const slash = require('slash');
|
||||
const StackUtils = require('stack-utils');
|
||||
const assert = require('./assert');
|
||||
const concordanceOptions = require('./concordance-options').default;
|
||||
|
||||
// Whether the thrown value is one of AVA's own assertion errors (as opposed
// to an arbitrary error thrown by user code).
function isAvaAssertionError(source) {
	return source instanceof assert.AssertionError;
}
|
||||
|
||||
// Property filter for clean-yaml-object: drop `message`, `name` and `stack`
// at the root of the error (they are serialized separately); keep everything
// else, including all nested properties.
function filter(propertyName, isRoot) {
	if (!isRoot) {
		return true;
	}

	return propertyName !== 'message' && propertyName !== 'name' && propertyName !== 'stack';
}
|
||||
|
||||
const stackUtils = new StackUtils();
|
||||
// Scans a stack trace for the first frame that originated in the test file
// itself, returning a source descriptor ({file, line, …}) or null when no
// such frame (or no stack/test file) is available.
function extractSource(stack, testFile) {
	if (!stack || !testFile) {
		return null;
	}

	// Normalize the test file so it matches `callSite.file`.
	const relFile = path.relative(process.cwd(), testFile);
	const normalizedFile = process.platform === 'win32' ? slash(relFile) : relFile;
	for (const line of stack.split('\n')) {
		try {
			const callSite = stackUtils.parseLine(line);
			if (callSite.file === normalizedFile) {
				return {
					isDependency: false,
					isWithinProject: true,
					file: path.resolve(process.cwd(), callSite.file),
					line: callSite.line
				};
			}
		} catch {}
		// Unparseable lines (or parseLine returning null) are skipped.
	}

	return null;
}
|
||||
|
||||
// Expands a raw {file, line} source reference into a descriptor that also
// records whether the file lives inside the project and whether it is a
// node_modules dependency. Returns null for a missing source.
function buildSource(source) {
	if (!source) {
		return null;
	}

	// Assume the CWD is the project directory. This holds since this function
	// is only called in test workers, which are created with their working
	// directory set to the project directory.
	const projectDir = process.cwd();

	const file = path.resolve(projectDir, source.file.trim());
	const rel = path.relative(projectDir, file);

	// A leading '..' segment (or a drive-letter segment on Windows) means the
	// file is outside the project.
	const [segment] = rel.split(path.sep);
	const isWithinProject = segment !== '..' && (process.platform !== 'win32' || !segment.includes(':'));
	const isDependency = isWithinProject && path.dirname(rel).split(path.sep).includes('node_modules');

	return {
		isDependency,
		isWithinProject,
		file,
		line: source.line
	};
}
|
||||
|
||||
// Builds the serializable representation of an Error. May throw if the error
// object misbehaves (e.g. poisoned getters); callers must be prepared for
// that (see serializeError below).
function trySerializeError(err, shouldBeautifyStack, testFile) {
	// Prefer the stack captured at assertion time, if one was saved.
	const stack = err.savedError ? err.savedError.stack : err.stack;

	const retval = {
		avaAssertionError: isAvaAssertionError(err),
		nonErrorObject: false,
		source: extractSource(stack, testFile),
		stack,
		shouldBeautifyStack
	};

	if (err.actualStack) {
		retval.stack = err.actualStack;
	}

	if (retval.avaAssertionError) {
		// AVA assertion errors carry structured data used by the reporter.
		retval.improperUsage = err.improperUsage;
		retval.message = err.message;
		retval.name = err.name;
		retval.statements = err.statements;
		retval.values = err.values;

		if (err.fixedSource) {
			const source = buildSource(err.fixedSource);
			if (source) {
				retval.source = source;
			}
		}

		if (err.assertion) {
			retval.assertion = err.assertion;
		}

		if (err.operator) {
			retval.operator = err.operator;
		}
	} else {
		retval.object = cleanYamlObject(err, filter); // Cleanly copy non-standard properties
		if (typeof err.message === 'string') {
			retval.message = err.message;
		}

		if (typeof err.name === 'string') {
			retval.name = err.name;
		}
	}

	// Derive a one-or-more-line summary from the stack, stopping before the
	// frame listing.
	if (typeof err.stack === 'string') {
		const lines = err.stack.split('\n');
		if (err.name === 'SyntaxError' && !lines[0].startsWith('SyntaxError')) {
			// V8 prefixes SyntaxError stacks with the offending source line;
			// include everything up to and including the SyntaxError line.
			retval.summary = '';
			for (const line of lines) {
				retval.summary += line + '\n';
				if (line.startsWith('SyntaxError')) {
					break;
				}
			}

			retval.summary = retval.summary.trim();
		} else {
			// Skip the source line header inserted by `esm`:
			// <https://github.com/standard-things/esm/wiki/improved-errors>
			const start = lines.findIndex(line => !/:\d+$/.test(line));
			retval.summary = '';
			for (let index = start; index < lines.length; index++) {
				if (lines[index].startsWith('    at')) {
					break;
				}

				const next = index + 1;
				const end = next === lines.length || lines[next].startsWith('    at');
				retval.summary += end ? lines[index] : lines[index] + '\n';
			}
		}
	}

	return retval;
}
|
||||
|
||||
// Serializes any thrown value for transfer to the main process. Non-Error
// values get a formatted Concordance description; Errors are serialized via
// trySerializeError(), with a replacement error if serialization itself
// fails. `origin` labels where the error came from (e.g. 'Test failure').
function serializeError(origin, shouldBeautifyStack, err, testFile) {
	if (!isError(err)) {
		return {
			avaAssertionError: false,
			nonErrorObject: true,
			formatted: concordance.formatDescriptor(concordance.describe(err, concordanceOptions), concordanceOptions)
		};
	}

	try {
		return trySerializeError(err, shouldBeautifyStack, testFile);
	} catch {
		// The error couldn't be serialized (e.g. throwing getters); report a
		// stand-in error instead of crashing the worker.
		const replacement = new Error(`${origin}: Could not serialize error`);
		return {
			avaAssertionError: false,
			nonErrorObject: false,
			name: replacement.name,
			message: replacement.message,
			stack: replacement.stack,
			summary: replacement.message
		};
	}
}
|
||||
|
||||
module.exports = serializeError;
|
||||
463
node_modules/ava/lib/snapshot-manager.js
generated
vendored
Normal file
463
node_modules/ava/lib/snapshot-manager.js
generated
vendored
Normal file
|
|
@ -0,0 +1,463 @@
|
|||
'use strict';
|
||||
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const zlib = require('zlib');
|
||||
|
||||
const concordance = require('concordance');
|
||||
const indentString = require('indent-string');
|
||||
const md5Hex = require('md5-hex');
|
||||
const convertSourceMap = require('convert-source-map');
|
||||
const slash = require('slash');
|
||||
const writeFileAtomic = require('write-file-atomic');
|
||||
const mem = require('mem');
|
||||
|
||||
const concordanceOptions = require('./concordance-options').snapshotManager;
|
||||
|
||||
// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
|
||||
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
|
||||
// The version is encoded as an unsigned 16 bit integer.
|
||||
const VERSION = 2;
|
||||
|
||||
const VERSION_HEADER = Buffer.alloc(2);
|
||||
VERSION_HEADER.writeUInt16LE(VERSION);
|
||||
|
||||
// The decoder matches on the trailing newline byte (0x0A).
|
||||
const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
|
||||
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
|
||||
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');
|
||||
|
||||
const MD5_HASH_LENGTH = 16;
|
||||
|
||||
// Base class for errors raised while reading a snapshot file; carries the
// path of the offending .snap file.
class SnapshotError extends Error {
	constructor(message, snapPath) {
		super(message);
		this.name = 'SnapshotError';
		this.snapPath = snapPath;
	}
}
|
||||
exports.SnapshotError = SnapshotError;
|
||||
|
||||
// Raised when the MD5 checksum stored in the snapshot file does not match
// the compressed payload (i.e. the file is corrupt or was hand-edited).
class ChecksumError extends SnapshotError {
	constructor(snapPath) {
		super('Checksum mismatch', snapPath);
		this.name = 'ChecksumError';
	}
}
|
||||
exports.ChecksumError = ChecksumError;
|
||||
|
||||
// Raised when the snapshot file was written by an incompatible AVA version;
// records both the encountered and the expected encoding version.
class VersionMismatchError extends SnapshotError {
	constructor(snapPath, version) {
		super('Unexpected snapshot version', snapPath);
		this.name = 'VersionMismatchError';
		this.snapVersion = version;
		this.expectedVersion = VERSION;
	}
}
|
||||
exports.VersionMismatchError = VersionMismatchError;
|
||||
|
||||
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');
|
||||
// Jest-era snapshot files start with a fixed ASCII header; detect them by
// comparing the buffer's leading bytes against that header.
function isLegacySnapshot(buffer) {
	const prefix = buffer.slice(0, LEGACY_SNAPSHOT_HEADER.byteLength);
	return LEGACY_SNAPSHOT_HEADER.equals(prefix);
}
|
||||
|
||||
// Raised when a Jest-format ("legacy") snapshot file is encountered; AVA
// cannot read these and requires the snapshots to be regenerated.
class LegacyError extends SnapshotError {
	constructor(snapPath) {
		super('Legacy snapshot file', snapPath);
		this.name = 'LegacyError';
	}
}
|
||||
exports.LegacyError = LegacyError;
|
||||
|
||||
// Reads a file synchronously, returning its contents as a Buffer. A missing
// file yields null; any other filesystem error is rethrown.
function tryRead(file) {
	try {
		return fs.readFileSync(file);
	} catch (error) {
		if (error.code !== 'ENOENT') {
			throw error;
		}

		return null;
	}
}
|
||||
|
||||
// Returns a slice of the buffer with all trailing LF (0x0A) and CR (0x0D)
// bytes removed.
function withoutLineEndings(buffer) {
	let end = buffer.byteLength;
	while (end > 0 && (buffer[end - 1] === 0x0A || buffer[end - 1] === 0x0D)) {
		end--;
	}

	return buffer.slice(0, end);
}
|
||||
|
||||
// Formats a single snapshot report entry: an optional quoted label followed
// by the Concordance-rendered value, indented as a Markdown code block.
// A missing/falsy label is treated as empty — previously it was concatenated
// as-is, which would render the literal string "undefined" in the report.
function formatEntry(label, descriptor) {
	const prefix = label ? `> ${label}\n\n` : '';

	const codeBlock = indentString(concordance.formatDescriptor(descriptor, concordanceOptions), 4);
	return Buffer.from(prefix + codeBlock, 'utf8');
}
|
||||
|
||||
// Flattens a Map of test-title → [entry buffers] into an ordered list of
// buffers (with `## title` headings and separators between entries), sorted
// by title for stable output. Returns the buffers plus their total length.
function combineEntries(entries) {
	const buffers = [];
	let byteLength = 0;

	// Sort titles so report output is deterministic.
	const sortedKeys = [...entries.keys()].sort();
	for (const key of sortedKeys) {
		const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
		buffers.push(keyBuffer);
		byteLength += keyBuffer.byteLength;

		const formattedEntries = entries.get(key);
		const last = formattedEntries[formattedEntries.length - 1];
		for (const entry of formattedEntries) {
			buffers.push(entry);
			byteLength += entry.byteLength;

			// Separate consecutive entries; no separator after the last one.
			if (entry !== last) {
				buffers.push(REPORT_SEPARATOR);
				byteLength += REPORT_SEPARATOR.byteLength;
			}
		}
	}

	return {buffers, byteLength};
}
|
||||
|
||||
// Builds a complete Markdown snapshot report: a header naming the test file
// and .snap file, followed by all entries and a trailing newline.
function generateReport(relFile, snapFile, entries) {
	const combined = combineEntries(entries);
	const {buffers} = combined;
	let {byteLength} = combined;

	const header = Buffer.from(`# Snapshot report for \`${slash(relFile)}\`

The actual snapshot is saved in \`${snapFile}\`.

Generated by [AVA](https://avajs.dev).`, 'utf8');
	buffers.unshift(header);
	byteLength += header.byteLength;

	buffers.push(REPORT_TRAILING_NEWLINE);
	byteLength += REPORT_TRAILING_NEWLINE.byteLength;
	return Buffer.concat(buffers, byteLength);
}
|
||||
|
||||
// Appends new entries to an existing Markdown report (append-only mode),
// stripping the old trailing newlines first and re-adding one at the end.
function appendReportEntries(existingReport, entries) {
	const combined = combineEntries(entries);
	const {buffers} = combined;
	let {byteLength} = combined;

	const prepend = withoutLineEndings(existingReport);
	buffers.unshift(prepend);
	byteLength += prepend.byteLength;

	buffers.push(REPORT_TRAILING_NEWLINE);
	byteLength += REPORT_TRAILING_NEWLINE.byteLength;
	return Buffer.concat(buffers, byteLength);
}
|
||||
|
||||
// Encodes the in-memory snapshot map into the .snap file format: a readable
// ASCII prefix, a 16-bit version, an MD5 checksum, then a gzipped body
// consisting of a header (hash → entry pointers) followed by the snapshot
// payloads. Takes a Map of md5-hex hash → array of serialized snapshot
// buffers.
function encodeSnapshots(buffersByHash) {
	const buffers = [];
	let byteOffset = 0;

	// Entry start and end pointers are relative to the header length. This means
	// it's possible to append new entries to an existing snapshot file, without
	// having to rewrite pointers for existing entries.
	const headerLength = Buffer.alloc(4);
	buffers.push(headerLength);
	byteOffset += 4;

	// Allows 65535 hashes (tests or identified snapshots) per file.
	const numberHashes = Buffer.alloc(2);
	numberHashes.writeUInt16LE(buffersByHash.size);
	buffers.push(numberHashes);
	byteOffset += 2;

	// First pass: lay out the header, reserving 4-byte start/end pointer
	// slots for every snapshot. The pointers are filled in below once the
	// total header length is known.
	const entries = [];
	for (const pair of buffersByHash) {
		const hash = pair[0];
		const snapshotBuffers = pair[1];

		buffers.push(Buffer.from(hash, 'hex'));
		byteOffset += MD5_HASH_LENGTH;

		// Allows 65535 snapshots per hash.
		const numberSnapshots = Buffer.alloc(2);
		numberSnapshots.writeUInt16LE(snapshotBuffers.length, 0);
		buffers.push(numberSnapshots);
		byteOffset += 2;

		for (const value of snapshotBuffers) {
			// Each pointer is 32 bits, restricting the total, uncompressed buffer to
			// 4 GiB.
			const start = Buffer.alloc(4);
			const end = Buffer.alloc(4);
			entries.push({start, end, value});

			buffers.push(start, end);
			byteOffset += 8;
		}
	}

	// At this point byteOffset equals the header length.
	headerLength.writeUInt32LE(byteOffset, 0);

	// Second pass: append payloads and back-fill each entry's start/end
	// pointers (relative to the end of the header).
	let bodyOffset = 0;
	for (const entry of entries) {
		const start = bodyOffset;
		const end = bodyOffset + entry.value.byteLength;
		entry.start.writeUInt32LE(start, 0);
		entry.end.writeUInt32LE(end, 0);
		buffers.push(entry.value);
		bodyOffset = end;
	}

	byteOffset += bodyOffset;

	const compressed = zlib.gzipSync(Buffer.concat(buffers, byteOffset));
	compressed[9] = 0x03; // Override the GZip header containing the OS to always be Linux
	const md5sum = crypto.createHash('md5').update(compressed).digest();
	return Buffer.concat([
		READABLE_PREFIX,
		VERSION_HEADER,
		md5sum,
		compressed
	], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + MD5_HASH_LENGTH + compressed.byteLength);
}
|
||||
|
||||
// Decodes a .snap file buffer back into a Map of md5-hex hash → array of
// snapshot buffers. Throws LegacyError for Jest-format files,
// VersionMismatchError for incompatible versions and ChecksumError when the
// payload is corrupt. Inverse of encodeSnapshots().
function decodeSnapshots(buffer, snapPath) {
	if (isLegacySnapshot(buffer)) {
		throw new LegacyError(snapPath);
	}

	// The version starts after the readable prefix, which is ended by a newline
	// byte (0x0A).
	const versionOffset = buffer.indexOf(0x0A) + 1;
	const version = buffer.readUInt16LE(versionOffset);
	if (version !== VERSION) {
		throw new VersionMismatchError(snapPath, version);
	}

	// Verify the checksum before attempting to decompress.
	const md5sumOffset = versionOffset + 2;
	const compressedOffset = md5sumOffset + MD5_HASH_LENGTH;
	const compressed = buffer.slice(compressedOffset);

	const md5sum = crypto.createHash('md5').update(compressed).digest();
	const expectedSum = buffer.slice(md5sumOffset, compressedOffset);
	if (!md5sum.equals(expectedSum)) {
		throw new ChecksumError(snapPath);
	}

	const decompressed = zlib.gunzipSync(compressed);
	let byteOffset = 0;

	// Entry pointers in the header are relative to the header's end; see
	// encodeSnapshots().
	const headerLength = decompressed.readUInt32LE(byteOffset);
	byteOffset += 4;

	const snapshotsByHash = new Map();
	const numberHashes = decompressed.readUInt16LE(byteOffset);
	byteOffset += 2;

	for (let count = 0; count < numberHashes; count++) {
		const hash = decompressed.toString('hex', byteOffset, byteOffset + MD5_HASH_LENGTH);
		byteOffset += MD5_HASH_LENGTH;

		const numberSnapshots = decompressed.readUInt16LE(byteOffset);
		byteOffset += 2;

		const snapshotsBuffers = new Array(numberSnapshots);
		for (let index = 0; index < numberSnapshots; index++) {
			const start = decompressed.readUInt32LE(byteOffset) + headerLength;
			byteOffset += 4;
			const end = decompressed.readUInt32LE(byteOffset) + headerLength;
			byteOffset += 4;
			snapshotsBuffers[index] = decompressed.slice(start, end);
		}

		// Allow for new entries to be appended to an existing header, which could
		// lead to the same hash being present multiple times.
		if (snapshotsByHash.has(hash)) {
			snapshotsByHash.set(hash, snapshotsByHash.get(hash).concat(snapshotsBuffers));
		} else {
			snapshotsByHash.set(hash, snapshotsBuffers);
		}
	}

	return snapshotsByHash;
}
|
||||
|
||||
// Manages snapshot state for one test file: comparing assertions against
// stored snapshots, recording new ones, and writing the .snap file plus the
// human-readable Markdown report.
class Manager {
	constructor(options) {
		// When true, new entries are appended to the existing report instead
		// of regenerating it (used when not updating snapshots).
		this.appendOnly = options.appendOnly;
		this.dir = options.dir;
		this.recordNewSnapshots = options.recordNewSnapshots;
		this.relFile = options.relFile;
		this.reportFile = options.reportFile;
		this.snapFile = options.snapFile;
		this.snapPath = options.snapPath;
		// Map of md5-hex hash → array of serialized snapshot buffers.
		this.snapshotsByHash = options.snapshotsByHash;

		// Set once a snapshot is recorded; save() is a no-op otherwise.
		this.hasChanges = false;
		this.reportEntries = new Map();
	}

	// Compares an expected value against the stored snapshot identified by
	// options.belongsTo/options.index. Records a new snapshot when none
	// exists (if permitted); returns {pass, actual?, expected?, record?}.
	compare(options) {
		const hash = md5Hex(options.belongsTo);
		const entries = this.snapshotsByHash.get(hash) || [];
		const snapshotBuffer = entries[options.index];

		if (!snapshotBuffer) {
			if (!this.recordNewSnapshots) {
				return {pass: false};
			}

			// `deferRecording` lets the caller decide later (e.g. t.try())
			// whether the recording should be committed.
			if (options.deferRecording) {
				const record = this.deferRecord(hash, options);
				return {pass: true, record};
			}

			this.record(hash, options);
			return {pass: true};
		}

		const actual = concordance.deserialize(snapshotBuffer, concordanceOptions);
		const expected = concordance.describe(options.expected, concordanceOptions);
		const pass = concordance.compareDescriptors(actual, expected);

		return {actual, expected, pass};
	}

	// Prepares a snapshot recording but doesn't commit it; returns a callback
	// that performs the actual recording.
	deferRecord(hash, options) {
		const descriptor = concordance.describe(options.expected, concordanceOptions);
		const snapshot = concordance.serialize(descriptor);
		const entry = formatEntry(options.label, descriptor);

		return () => { // Must be called in order!
			this.hasChanges = true;

			let snapshots = this.snapshotsByHash.get(hash);
			if (!snapshots) {
				snapshots = [];
				this.snapshotsByHash.set(hash, snapshots);
			}

			// Snapshots for a given hash must be recorded strictly in index
			// order: neither skipping ahead nor overwriting is allowed.
			if (options.index > snapshots.length) {
				throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, exceeds expected index of ${snapshots.length}`);
			}

			if (options.index < snapshots.length) {
				throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, already exists`);
			}

			snapshots.push(snapshot);

			if (this.reportEntries.has(options.belongsTo)) {
				this.reportEntries.get(options.belongsTo).push(entry);
			} else {
				this.reportEntries.set(options.belongsTo, [entry]);
			}
		};
	}

	// Records a snapshot immediately.
	record(hash, options) {
		const record = this.deferRecord(hash, options);
		record();
	}

	// Writes the .snap file and the Markdown report atomically. Returns the
	// paths that were written (including temp files), or null when nothing
	// changed.
	save() {
		if (!this.hasChanges) {
			return null;
		}

		const {snapPath} = this;
		const buffer = encodeSnapshots(this.snapshotsByHash);

		const reportPath = path.join(this.dir, this.reportFile);
		const existingReport = this.appendOnly ? tryRead(reportPath) : null;
		const reportBuffer = existingReport ?
			appendReportEntries(existingReport, this.reportEntries) :
			generateReport(this.relFile, this.snapFile, this.reportEntries);

		fs.mkdirSync(this.dir, {recursive: true});

		// Collect temp file names so callers can clean up after a crash.
		const paths = [snapPath, reportPath];
		const tmpfileCreated = tmpfile => paths.push(tmpfile);
		writeFileAtomic.sync(snapPath, buffer, {tmpfileCreated});
		writeFileAtomic.sync(reportPath, reportBuffer, {tmpfileCreated});
		return paths;
	}
}
|
||||
|
||||
// Maps a (possibly compiled) test file back to its original source file by
// following its source map, when one is embedded or referenced. Falls back
// to the file itself. Memoized per file path.
const resolveSourceFile = mem(file => {
	const testDir = path.dirname(file);
	const buffer = tryRead(file);
	if (!buffer) {
		return file; // Assume the file is stubbed in our test suite.
	}

	const source = buffer.toString();
	const converter = convertSourceMap.fromSource(source) || convertSourceMap.fromMapFileSource(source, testDir);
	if (converter) {
		const map = converter.toObject();
		const firstSource = `${map.sourceRoot || ''}${map.sources[0]}`;
		return path.resolve(testDir, firstSource);
	}

	return file;
});
|
||||
|
||||
// Decides where snapshot files live for a given test file: a user-configured
// fixed location (mirroring the test's relative path), `__snapshots__` next
// to `__tests__`-style suites, `snapshots` next to `test`/`tests` dirs, or
// the test's own directory. Memoized by test file path.
const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
	const testDir = path.dirname(resolveSourceFile(file));
	if (fixedLocation) {
		const relativeTestLocation = path.relative(projectDir, testDir);
		return path.join(fixedLocation, relativeTestLocation);
	}

	const parts = new Set(path.relative(projectDir, testDir).split(path.sep));
	if (parts.has('__tests__')) {
		return path.join(testDir, '__snapshots__');
	}

	if (parts.has('test') || parts.has('tests')) { // Accept tests, even though it's not in the default test patterns
		return path.join(testDir, 'snapshots');
	}

	return testDir;
}, {cacheKey: ([{file}]) => file});
|
||||
|
||||
exports.determineSnapshotDir = determineSnapshotDir;
|
||||
|
||||
// Loads (or initializes) the snapshot Manager for a test file. In
// append-only mode the existing .snap file is decoded and extended; when
// updating, snapshots are regenerated from scratch.
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
	const dir = determineSnapshotDir({file, fixedLocation, projectDir});
	const relFile = path.relative(projectDir, resolveSourceFile(file));
	const name = path.basename(relFile);
	const reportFile = `${name}.md`;
	const snapFile = `${name}.snap`;
	const snapPath = path.join(dir, snapFile);

	let appendOnly = !updating;
	let snapshotsByHash;

	if (!updating) {
		const buffer = tryRead(snapPath);
		if (buffer) {
			snapshotsByHash = decodeSnapshots(buffer, snapPath);
		} else {
			// No existing file: start fresh rather than appending.
			appendOnly = false;
		}
	}

	return new Manager({
		appendOnly,
		dir,
		recordNewSnapshots,
		relFile,
		reportFile,
		snapFile,
		snapPath,
		snapshotsByHash: snapshotsByHash || new Map()
	});
}
|
||||
|
||||
exports.load = load;
|
||||
738
node_modules/ava/lib/test.js
generated
vendored
Normal file
738
node_modules/ava/lib/test.js
generated
vendored
Normal file
|
|
@ -0,0 +1,738 @@
|
|||
'use strict';
|
||||
const concordance = require('concordance');
|
||||
const isPromise = require('is-promise');
|
||||
const plur = require('plur');
|
||||
const assert = require('./assert');
|
||||
const nowAndTimers = require('./now-and-timers');
|
||||
const parseTestArgs = require('./parse-test-args');
|
||||
const concordanceOptions = require('./concordance-options').default;
|
||||
|
||||
function formatErrorValue(label, error) {
|
||||
const formatted = concordance.format(error, concordanceOptions);
|
||||
return {label, formatted};
|
||||
}
|
||||
|
||||
const captureSavedError = () => {
|
||||
const limitBefore = Error.stackTraceLimit;
|
||||
Error.stackTraceLimit = 1;
|
||||
const err = new Error();
|
||||
Error.stackTraceLimit = limitBefore;
|
||||
return err;
|
||||
};
|
||||
|
||||
const testMap = new WeakMap();
|
||||
class ExecutionContext extends assert.Assertions {
|
||||
constructor(test) {
|
||||
super({
|
||||
pass: () => {
|
||||
test.countPassedAssertion();
|
||||
},
|
||||
pending: promise => {
|
||||
test.addPendingAssertion(promise);
|
||||
},
|
||||
fail: err => {
|
||||
test.addFailedAssertion(err);
|
||||
},
|
||||
skip: () => {
|
||||
test.countPassedAssertion();
|
||||
},
|
||||
compareWithSnapshot: options => {
|
||||
return test.compareWithSnapshot(options);
|
||||
},
|
||||
powerAssert: test.powerAssert
|
||||
});
|
||||
testMap.set(this, test);
|
||||
|
||||
this.snapshot.skip = () => {
|
||||
test.skipSnapshot();
|
||||
};
|
||||
|
||||
this.log = (...inputArgs) => {
|
||||
const args = inputArgs.map(value => {
|
||||
return typeof value === 'string' ?
|
||||
value :
|
||||
concordance.format(value, concordanceOptions);
|
||||
});
|
||||
if (args.length > 0) {
|
||||
test.addLog(args.join(' '));
|
||||
}
|
||||
};
|
||||
|
||||
this.plan = count => {
|
||||
test.plan(count, captureSavedError());
|
||||
};
|
||||
|
||||
this.plan.skip = () => {};
|
||||
|
||||
this.timeout = ms => {
|
||||
test.timeout(ms);
|
||||
};
|
||||
|
||||
this.teardown = callback => {
|
||||
test.addTeardown(callback);
|
||||
};
|
||||
|
||||
this.try = async (...attemptArgs) => {
|
||||
const {args, buildTitle, implementations, receivedImplementationArray} = parseTestArgs(attemptArgs);
|
||||
|
||||
if (implementations.length === 0) {
|
||||
throw new TypeError('Expected an implementation.');
|
||||
}
|
||||
|
||||
const attemptPromises = implementations.map((implementation, index) => {
|
||||
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
|
||||
|
||||
if (!isSet || isEmpty) {
|
||||
title = `${test.title} ─ attempt ${test.attemptCount + 1 + index}`;
|
||||
} else if (isValid) {
|
||||
title = `${test.title} ─ ${title}`;
|
||||
} else {
|
||||
throw new TypeError('`t.try()` titles must be strings'); // Throw synchronously!
|
||||
}
|
||||
|
||||
if (!test.registerUniqueTitle(title)) {
|
||||
throw new Error(`Duplicate test title: ${title}`);
|
||||
}
|
||||
|
||||
return {implementation, title};
|
||||
}).map(async ({implementation, title}) => {
|
||||
let committed = false;
|
||||
let discarded = false;
|
||||
|
||||
const {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount} = await test.runAttempt(title, t => implementation(t, ...args));
|
||||
|
||||
return {
|
||||
errors,
|
||||
logs: [...logs], // Don't allow modification of logs.
|
||||
passed,
|
||||
title,
|
||||
commit: ({retainLogs = true} = {}) => {
|
||||
if (committed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (discarded) {
|
||||
test.saveFirstError(new Error('Can’t commit a result that was previously discarded'));
|
||||
return;
|
||||
}
|
||||
|
||||
committed = true;
|
||||
test.finishAttempt({
|
||||
assertCount,
|
||||
commit: true,
|
||||
deferredSnapshotRecordings,
|
||||
errors,
|
||||
logs,
|
||||
passed,
|
||||
retainLogs,
|
||||
snapshotCount,
|
||||
startingSnapshotCount
|
||||
});
|
||||
},
|
||||
discard: ({retainLogs = false} = {}) => {
|
||||
if (committed) {
|
||||
test.saveFirstError(new Error('Can’t discard a result that was previously committed'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (discarded) {
|
||||
return;
|
||||
}
|
||||
|
||||
discarded = true;
|
||||
test.finishAttempt({
|
||||
assertCount: 0,
|
||||
commit: false,
|
||||
deferredSnapshotRecordings,
|
||||
errors,
|
||||
logs,
|
||||
passed,
|
||||
retainLogs,
|
||||
snapshotCount,
|
||||
startingSnapshotCount
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
const results = await Promise.all(attemptPromises);
|
||||
return receivedImplementationArray ? results : results[0];
|
||||
};
|
||||
}
|
||||
|
||||
get end() {
|
||||
const end = testMap.get(this).bindEndCallback();
|
||||
const endFn = error => end(error, captureSavedError());
|
||||
return endFn;
|
||||
}
|
||||
|
||||
get title() {
|
||||
return testMap.get(this).title;
|
||||
}
|
||||
|
||||
get context() {
|
||||
return testMap.get(this).contextRef.get();
|
||||
}
|
||||
|
||||
set context(context) {
|
||||
testMap.get(this).contextRef.set(context);
|
||||
}
|
||||
|
||||
get passed() {
|
||||
return testMap.get(this).testPassed;
|
||||
}
|
||||
|
||||
_throwsArgStart(assertion, file, line) {
|
||||
testMap.get(this).trackThrows({assertion, file, line});
|
||||
}
|
||||
|
||||
_throwsArgEnd() {
|
||||
testMap.get(this).trackThrows(null);
|
||||
}
|
||||
}
|
||||
|
||||
class Test {
|
||||
constructor(options) {
|
||||
this.contextRef = options.contextRef;
|
||||
this.experiments = options.experiments || {};
|
||||
this.failWithoutAssertions = options.failWithoutAssertions;
|
||||
this.fn = options.fn;
|
||||
this.isHook = options.isHook === true;
|
||||
this.metadata = options.metadata;
|
||||
this.powerAssert = options.powerAssert;
|
||||
this.title = options.title;
|
||||
this.testPassed = options.testPassed;
|
||||
this.registerUniqueTitle = options.registerUniqueTitle;
|
||||
this.logs = [];
|
||||
this.teardowns = [];
|
||||
|
||||
const {snapshotBelongsTo = this.title, nextSnapshotIndex = 0} = options;
|
||||
this.snapshotBelongsTo = snapshotBelongsTo;
|
||||
this.nextSnapshotIndex = nextSnapshotIndex;
|
||||
this.snapshotCount = 0;
|
||||
|
||||
const deferRecording = this.metadata.inline;
|
||||
this.deferredSnapshotRecordings = [];
|
||||
this.compareWithSnapshot = ({expected, id, message}) => {
|
||||
this.snapshotCount++;
|
||||
|
||||
// TODO: In a breaking change, reject non-undefined, falsy IDs and messages.
|
||||
const belongsTo = id || snapshotBelongsTo;
|
||||
const index = id ? 0 : this.nextSnapshotIndex++;
|
||||
const label = id ? '' : message || `Snapshot ${index + 1}`; // Human-readable labels start counting at 1.
|
||||
|
||||
const {record, ...result} = options.compareTestSnapshot({belongsTo, deferRecording, expected, index, label});
|
||||
if (record) {
|
||||
this.deferredSnapshotRecordings.push(record);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
this.skipSnapshot = () => {
|
||||
if (options.updateSnapshots) {
|
||||
this.addFailedAssertion(new Error('Snapshot assertions cannot be skipped when updating snapshots'));
|
||||
} else {
|
||||
this.nextSnapshotIndex++;
|
||||
this.snapshotCount++;
|
||||
this.countPassedAssertion();
|
||||
}
|
||||
};
|
||||
|
||||
this.runAttempt = async (title, fn) => {
|
||||
if (this.finishing) {
|
||||
this.saveFirstError(new Error('Running a `t.try()`, but the test has already finished'));
|
||||
}
|
||||
|
||||
this.attemptCount++;
|
||||
this.pendingAttemptCount++;
|
||||
|
||||
const {contextRef, snapshotBelongsTo, nextSnapshotIndex, snapshotCount: startingSnapshotCount} = this;
|
||||
const attempt = new Test({
|
||||
...options,
|
||||
fn,
|
||||
metadata: {...options.metadata, callback: false, failing: false, inline: true},
|
||||
contextRef: contextRef.copy(),
|
||||
snapshotBelongsTo,
|
||||
nextSnapshotIndex,
|
||||
title
|
||||
});
|
||||
|
||||
const {deferredSnapshotRecordings, error, logs, passed, assertCount, snapshotCount} = await attempt.run();
|
||||
const errors = error ? [error] : [];
|
||||
return {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount};
|
||||
};
|
||||
|
||||
this.assertCount = 0;
|
||||
this.assertError = undefined;
|
||||
this.attemptCount = 0;
|
||||
this.calledEnd = false;
|
||||
this.duration = null;
|
||||
this.endCallbackFinisher = null;
|
||||
this.finishDueToAttributedError = null;
|
||||
this.finishDueToInactivity = null;
|
||||
this.finishDueToTimeout = null;
|
||||
this.finishing = false;
|
||||
this.pendingAssertionCount = 0;
|
||||
this.pendingAttemptCount = 0;
|
||||
this.pendingThrowsAssertion = null;
|
||||
this.planCount = null;
|
||||
this.startedAt = 0;
|
||||
this.timeoutMs = 0;
|
||||
this.timeoutTimer = null;
|
||||
}
|
||||
|
||||
bindEndCallback() {
|
||||
if (this.metadata.callback) {
|
||||
return (error, savedError) => {
|
||||
this.endCallback(error, savedError);
|
||||
};
|
||||
}
|
||||
|
||||
if (this.metadata.inline) {
|
||||
throw new Error('`t.end()` is not supported inside `t.try()`');
|
||||
} else {
|
||||
throw new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
|
||||
}
|
||||
}
|
||||
|
||||
endCallback(error, savedError) {
|
||||
if (this.calledEnd) {
|
||||
this.saveFirstError(new Error('`t.end()` called more than once'));
|
||||
return;
|
||||
}
|
||||
|
||||
this.calledEnd = true;
|
||||
|
||||
if (error) {
|
||||
this.saveFirstError(new assert.AssertionError({
|
||||
actual: error,
|
||||
message: 'Callback called with an error',
|
||||
savedError,
|
||||
values: [formatErrorValue('Callback called with an error:', error)]
|
||||
}));
|
||||
}
|
||||
|
||||
if (this.endCallbackFinisher) {
|
||||
this.endCallbackFinisher();
|
||||
}
|
||||
}
|
||||
|
||||
createExecutionContext() {
|
||||
return new ExecutionContext(this);
|
||||
}
|
||||
|
||||
countPassedAssertion() {
|
||||
if (this.finishing) {
|
||||
this.saveFirstError(new Error('Assertion passed, but test has already finished'));
|
||||
}
|
||||
|
||||
if (this.pendingAttemptCount > 0) {
|
||||
this.saveFirstError(new Error('Assertion passed, but an attempt is pending. Use the attempt’s assertions instead'));
|
||||
}
|
||||
|
||||
this.assertCount++;
|
||||
this.refreshTimeout();
|
||||
}
|
||||
|
||||
addLog(text) {
|
||||
this.logs.push(text);
|
||||
}
|
||||
|
||||
addPendingAssertion(promise) {
|
||||
if (this.finishing) {
|
||||
this.saveFirstError(new Error('Assertion started, but test has already finished'));
|
||||
}
|
||||
|
||||
if (this.pendingAttemptCount > 0) {
|
||||
this.saveFirstError(new Error('Assertion started, but an attempt is pending. Use the attempt’s assertions instead'));
|
||||
}
|
||||
|
||||
this.assertCount++;
|
||||
this.pendingAssertionCount++;
|
||||
this.refreshTimeout();
|
||||
|
||||
promise
|
||||
.catch(error => this.saveFirstError(error))
|
||||
.then(() => { // eslint-disable-line promise/prefer-await-to-then
|
||||
this.pendingAssertionCount--;
|
||||
this.refreshTimeout();
|
||||
});
|
||||
}
|
||||
|
||||
addFailedAssertion(error) {
|
||||
if (this.finishing) {
|
||||
this.saveFirstError(new Error('Assertion failed, but test has already finished'));
|
||||
}
|
||||
|
||||
if (this.pendingAttemptCount > 0) {
|
||||
this.saveFirstError(new Error('Assertion failed, but an attempt is pending. Use the attempt’s assertions instead'));
|
||||
}
|
||||
|
||||
this.assertCount++;
|
||||
this.refreshTimeout();
|
||||
this.saveFirstError(error);
|
||||
}
|
||||
|
||||
finishAttempt({commit, deferredSnapshotRecordings, errors, logs, passed, retainLogs, snapshotCount, startingSnapshotCount}) {
|
||||
if (this.finishing) {
|
||||
if (commit) {
|
||||
this.saveFirstError(new Error('`t.try()` result was committed, but the test has already finished'));
|
||||
} else {
|
||||
this.saveFirstError(new Error('`t.try()` result was discarded, but the test has already finished'));
|
||||
}
|
||||
}
|
||||
|
||||
if (commit) {
|
||||
this.assertCount++;
|
||||
|
||||
if (startingSnapshotCount === this.snapshotCount) {
|
||||
this.snapshotCount += snapshotCount;
|
||||
this.nextSnapshotIndex += snapshotCount;
|
||||
for (const record of deferredSnapshotRecordings) {
|
||||
record();
|
||||
}
|
||||
} else {
|
||||
this.saveFirstError(new Error('Cannot commit `t.try()` result. Do not run concurrent snapshot assertions when using `t.try()`'));
|
||||
}
|
||||
}
|
||||
|
||||
this.pendingAttemptCount--;
|
||||
|
||||
if (commit && !passed) {
|
||||
this.saveFirstError(errors[0]);
|
||||
}
|
||||
|
||||
if (retainLogs) {
|
||||
for (const log of logs) {
|
||||
this.addLog(log);
|
||||
}
|
||||
}
|
||||
|
||||
this.refreshTimeout();
|
||||
}
|
||||
|
||||
saveFirstError(error) {
|
||||
if (!this.assertError) {
|
||||
this.assertError = error;
|
||||
}
|
||||
}
|
||||
|
||||
plan(count, planError) {
|
||||
if (typeof count !== 'number') {
|
||||
throw new TypeError('Expected a number');
|
||||
}
|
||||
|
||||
this.planCount = count;
|
||||
|
||||
// In case the `planCount` doesn't match `assertCount, we need the stack of
|
||||
// this function to throw with a useful stack.
|
||||
this.planError = planError;
|
||||
}
|
||||
|
||||
timeout(ms) {
|
||||
if (this.finishing) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.clearTimeout();
|
||||
this.timeoutMs = ms;
|
||||
this.timeoutTimer = nowAndTimers.setTimeout(() => {
|
||||
this.saveFirstError(new Error('Test timeout exceeded'));
|
||||
|
||||
if (this.finishDueToTimeout) {
|
||||
this.finishDueToTimeout();
|
||||
}
|
||||
}, ms);
|
||||
}
|
||||
|
||||
refreshTimeout() {
|
||||
if (!this.timeoutTimer) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.timeoutTimer.refresh) {
|
||||
this.timeoutTimer.refresh();
|
||||
} else {
|
||||
this.timeout(this.timeoutMs);
|
||||
}
|
||||
}
|
||||
|
||||
clearTimeout() {
|
||||
nowAndTimers.clearTimeout(this.timeoutTimer);
|
||||
this.timeoutTimer = null;
|
||||
}
|
||||
|
||||
addTeardown(callback) {
|
||||
if (this.isHook) {
|
||||
this.saveFirstError(new Error('`t.teardown()` is not allowed in hooks'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.finishing) {
|
||||
this.saveFirstError(new Error('`t.teardown()` cannot be used during teardown'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof callback !== 'function') {
|
||||
throw new TypeError('Expected a function');
|
||||
}
|
||||
|
||||
this.teardowns.push(callback);
|
||||
}
|
||||
|
||||
async runTeardowns() {
|
||||
for (const teardown of this.teardowns) {
|
||||
try {
|
||||
await teardown(); // eslint-disable-line no-await-in-loop
|
||||
} catch (error) {
|
||||
this.saveFirstError(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
verifyPlan() {
|
||||
if (!this.assertError && this.planCount !== null && this.planCount !== this.assertCount) {
|
||||
this.saveFirstError(new assert.AssertionError({
|
||||
assertion: 'plan',
|
||||
message: `Planned for ${this.planCount} ${plur('assertion', this.planCount)}, but got ${this.assertCount}.`,
|
||||
operator: '===',
|
||||
savedError: this.planError
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
verifyAssertions() {
|
||||
if (this.assertError) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.pendingAttemptCount > 0) {
|
||||
this.saveFirstError(new Error('Test finished, but not all attempts were committed or discarded'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.pendingAssertionCount > 0) {
|
||||
this.saveFirstError(new Error('Test finished, but an assertion is still pending'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.failWithoutAssertions) {
|
||||
if (this.planCount !== null) {
|
||||
return; // `verifyPlan()` will report an error already.
|
||||
}
|
||||
|
||||
if (this.assertCount === 0 && !this.calledEnd) {
|
||||
this.saveFirstError(new Error('Test finished without running any assertions'));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trackThrows(pending) {
|
||||
this.pendingThrowsAssertion = pending;
|
||||
}
|
||||
|
||||
detectImproperThrows(error) {
|
||||
if (!this.pendingThrowsAssertion) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const pending = this.pendingThrowsAssertion;
|
||||
this.pendingThrowsAssertion = null;
|
||||
|
||||
const values = [];
|
||||
if (error) {
|
||||
values.push(formatErrorValue(`The following error was thrown, possibly before \`t.${pending.assertion}()\` could be called:`, error));
|
||||
}
|
||||
|
||||
this.saveFirstError(new assert.AssertionError({
|
||||
assertion: pending.assertion,
|
||||
fixedSource: {file: pending.file, line: pending.line},
|
||||
improperUsage: true,
|
||||
message: `Improper usage of \`t.${pending.assertion}()\` detected`,
|
||||
savedError: error instanceof Error && error,
|
||||
values
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
|
||||
waitForPendingThrowsAssertion() {
|
||||
return new Promise(resolve => {
|
||||
this.finishDueToAttributedError = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToInactivity = () => {
|
||||
this.detectImproperThrows();
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
// Wait up to a second to see if an error can be attributed to the
|
||||
// pending assertion.
|
||||
nowAndTimers.setTimeout(() => this.finishDueToInactivity(), 1000).unref();
|
||||
});
|
||||
}
|
||||
|
||||
attributeLeakedError(error) {
|
||||
if (!this.detectImproperThrows(error)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.finishDueToAttributedError();
|
||||
return true;
|
||||
}
|
||||
|
||||
callFn() {
|
||||
try {
|
||||
return {
|
||||
ok: true,
|
||||
retval: this.fn.call(null, this.createExecutionContext())
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
ok: false,
|
||||
error
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
run() {
|
||||
this.startedAt = nowAndTimers.now();
|
||||
|
||||
const result = this.callFn();
|
||||
if (!result.ok) {
|
||||
if (!this.detectImproperThrows(result.error)) {
|
||||
this.saveFirstError(new assert.AssertionError({
|
||||
message: 'Error thrown in test',
|
||||
savedError: result.error instanceof Error && result.error,
|
||||
values: [formatErrorValue('Error thrown in test:', result.error)]
|
||||
}));
|
||||
}
|
||||
|
||||
return this.finish();
|
||||
}
|
||||
|
||||
const returnedObservable = result.retval !== null && typeof result.retval === 'object' && typeof result.retval.subscribe === 'function';
|
||||
const returnedPromise = isPromise(result.retval);
|
||||
|
||||
let promise;
|
||||
if (returnedObservable) {
|
||||
promise = new Promise((resolve, reject) => {
|
||||
result.retval.subscribe({
|
||||
error: reject,
|
||||
complete: () => resolve()
|
||||
});
|
||||
});
|
||||
} else if (returnedPromise) {
|
||||
// `retval` can be any thenable, so convert to a proper promise.
|
||||
promise = Promise.resolve(result.retval);
|
||||
}
|
||||
|
||||
if (this.metadata.callback) {
|
||||
if (returnedObservable || returnedPromise) {
|
||||
const asyncType = returnedObservable ? 'observables' : 'promises';
|
||||
this.saveFirstError(new Error(`Do not return ${asyncType} from tests declared via \`test.cb(…)\`. Use \`test.cb(…)\` for legacy callback APIs. When using promises, observables or async functions, use \`test(…)\`.`));
|
||||
return this.finish();
|
||||
}
|
||||
|
||||
if (this.calledEnd) {
|
||||
return this.finish();
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
this.endCallbackFinisher = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToAttributedError = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToTimeout = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToInactivity = () => {
|
||||
this.saveFirstError(new Error('`t.end()` was never called'));
|
||||
resolve(this.finish());
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
if (promise) {
|
||||
return new Promise(resolve => {
|
||||
this.finishDueToAttributedError = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToTimeout = () => {
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
this.finishDueToInactivity = () => {
|
||||
const error = returnedObservable ?
|
||||
new Error('Observable returned by test never completed') :
|
||||
new Error('Promise returned by test never resolved');
|
||||
this.saveFirstError(error);
|
||||
resolve(this.finish());
|
||||
};
|
||||
|
||||
promise
|
||||
.catch(error => {
|
||||
if (!this.detectImproperThrows(error)) {
|
||||
this.saveFirstError(new assert.AssertionError({
|
||||
message: 'Rejected promise returned by test',
|
||||
savedError: error instanceof Error && error,
|
||||
values: [formatErrorValue('Rejected promise returned by test. Reason:', error)]
|
||||
}));
|
||||
}
|
||||
})
|
||||
.then(() => resolve(this.finish())); // eslint-disable-line promise/prefer-await-to-then
|
||||
});
|
||||
}
|
||||
|
||||
return this.finish();
|
||||
}
|
||||
|
||||
async finish() {
|
||||
this.finishing = true;
|
||||
|
||||
if (!this.assertError && this.pendingThrowsAssertion) {
|
||||
return this.waitForPendingThrowsAssertion();
|
||||
}
|
||||
|
||||
this.clearTimeout();
|
||||
this.verifyPlan();
|
||||
this.verifyAssertions();
|
||||
await this.runTeardowns();
|
||||
|
||||
this.duration = nowAndTimers.now() - this.startedAt;
|
||||
|
||||
let error = this.assertError;
|
||||
let passed = !error;
|
||||
|
||||
if (this.metadata.failing) {
|
||||
passed = !passed;
|
||||
|
||||
if (passed) {
|
||||
error = null;
|
||||
} else {
|
||||
error = new Error('Test was expected to fail, but succeeded, you should stop marking the test as failing');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
deferredSnapshotRecordings: this.deferredSnapshotRecordings,
|
||||
duration: this.duration,
|
||||
error,
|
||||
logs: this.logs,
|
||||
metadata: this.metadata,
|
||||
passed,
|
||||
snapshotCount: this.snapshotCount,
|
||||
assertCount: this.assertCount,
|
||||
title: this.title
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Test;
|
||||
447
node_modules/ava/lib/watcher.js
generated
vendored
Normal file
447
node_modules/ava/lib/watcher.js
generated
vendored
Normal file
|
|
@ -0,0 +1,447 @@
|
|||
'use strict';
|
||||
const nodePath = require('path');
|
||||
const debug = require('debug')('ava:watcher');
|
||||
const chokidar = require('chokidar');
|
||||
const diff = require('lodash/difference');
|
||||
const flatten = require('lodash/flatten');
|
||||
const chalk = require('./chalk').get();
|
||||
const {applyTestFileFilter, classify, getChokidarIgnorePatterns} = require('./globs');
|
||||
const {levels: providerLevels} = require('./provider-manager');
|
||||
|
||||
function rethrowAsync(err) {
|
||||
// Don't swallow exceptions. Note that any
|
||||
// expected error should already have been logged
|
||||
setImmediate(() => {
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
const MIN_DEBOUNCE_DELAY = 10;
|
||||
const INITIAL_DEBOUNCE_DELAY = 100;
|
||||
const END_MESSAGE = chalk.gray('Type `r` and press enter to rerun tests\nType `u` and press enter to update snapshots\n');
|
||||
|
||||
class Debouncer {
|
||||
constructor(watcher) {
|
||||
this.watcher = watcher;
|
||||
this.timer = null;
|
||||
this.repeat = false;
|
||||
}
|
||||
|
||||
debounce(delay) {
|
||||
if (this.timer) {
|
||||
this.again = true;
|
||||
return;
|
||||
}
|
||||
|
||||
delay = delay ? Math.max(delay, MIN_DEBOUNCE_DELAY) : INITIAL_DEBOUNCE_DELAY;
|
||||
|
||||
const timer = setTimeout(async () => {
|
||||
await this.watcher.busy;
|
||||
// Do nothing if debouncing was canceled while waiting for the busy
|
||||
// promise to fulfil
|
||||
if (this.timer !== timer) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.again) {
|
||||
this.timer = null;
|
||||
this.again = false;
|
||||
this.debounce(delay / 2);
|
||||
} else {
|
||||
this.watcher.runAfterChanges();
|
||||
this.timer = null;
|
||||
this.again = false;
|
||||
}
|
||||
}, delay);
|
||||
|
||||
this.timer = timer;
|
||||
}
|
||||
|
||||
cancel() {
|
||||
if (this.timer) {
|
||||
clearTimeout(this.timer);
|
||||
this.timer = null;
|
||||
this.again = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class TestDependency {
|
||||
constructor(file, dependencies) {
|
||||
this.file = file;
|
||||
this.dependencies = dependencies;
|
||||
}
|
||||
|
||||
contains(dependency) {
|
||||
return this.dependencies.includes(dependency);
|
||||
}
|
||||
}
|
||||
|
||||
class Watcher {
|
||||
constructor({api, filter = [], globs, projectDir, providers, reporter}) {
|
||||
this.debouncer = new Debouncer(this);
|
||||
|
||||
this.clearLogOnNextRun = true;
|
||||
this.runVector = 0;
|
||||
this.previousFiles = [];
|
||||
this.globs = {cwd: projectDir, ...globs};
|
||||
|
||||
const patternFilters = filter.map(({pattern}) => pattern);
|
||||
|
||||
this.providers = providers.filter(({level}) => level >= providerLevels.pathRewrites);
|
||||
this.run = (specificFiles = [], updateSnapshots = false) => {
|
||||
const clearLogOnNextRun = this.clearLogOnNextRun && this.runVector > 0;
|
||||
if (this.runVector > 0) {
|
||||
this.clearLogOnNextRun = true;
|
||||
}
|
||||
|
||||
this.runVector++;
|
||||
|
||||
let runOnlyExclusive = false;
|
||||
if (specificFiles.length > 0) {
|
||||
const exclusiveFiles = specificFiles.filter(file => this.filesWithExclusiveTests.includes(file));
|
||||
runOnlyExclusive = exclusiveFiles.length !== this.filesWithExclusiveTests.length;
|
||||
if (runOnlyExclusive) {
|
||||
// The test files that previously contained exclusive tests are always
|
||||
// run, together with the remaining specific files.
|
||||
const remainingFiles = diff(specificFiles, exclusiveFiles);
|
||||
specificFiles = this.filesWithExclusiveTests.concat(remainingFiles);
|
||||
}
|
||||
|
||||
if (filter.length > 0) {
|
||||
specificFiles = applyTestFileFilter({cwd: projectDir, filter: patternFilters, testFiles: specificFiles});
|
||||
}
|
||||
|
||||
this.pruneFailures(specificFiles);
|
||||
}
|
||||
|
||||
this.touchedFiles.clear();
|
||||
this.previousFiles = specificFiles;
|
||||
this.busy = api.run({
|
||||
files: specificFiles,
|
||||
filter,
|
||||
runtimeOptions: {
|
||||
clearLogOnNextRun,
|
||||
previousFailures: this.sumPreviousFailures(this.runVector),
|
||||
runOnlyExclusive,
|
||||
runVector: this.runVector,
|
||||
updateSnapshots: updateSnapshots === true
|
||||
}
|
||||
})
|
||||
.then(runStatus => { // eslint-disable-line promise/prefer-await-to-then
|
||||
reporter.endRun();
|
||||
reporter.lineWriter.writeLine(END_MESSAGE);
|
||||
|
||||
if (this.clearLogOnNextRun && (
|
||||
runStatus.stats.failedHooks > 0 ||
|
||||
runStatus.stats.failedTests > 0 ||
|
||||
runStatus.stats.failedWorkers > 0 ||
|
||||
runStatus.stats.internalErrors > 0 ||
|
||||
runStatus.stats.timeouts > 0 ||
|
||||
runStatus.stats.uncaughtExceptions > 0 ||
|
||||
runStatus.stats.unhandledRejections > 0
|
||||
)) {
|
||||
this.clearLogOnNextRun = false;
|
||||
}
|
||||
})
|
||||
.catch(rethrowAsync);
|
||||
};
|
||||
|
||||
this.testDependencies = [];
|
||||
this.trackTestDependencies(api);
|
||||
|
||||
this.touchedFiles = new Set();
|
||||
this.trackTouchedFiles(api);
|
||||
|
||||
this.filesWithExclusiveTests = [];
|
||||
this.trackExclusivity(api);
|
||||
|
||||
this.filesWithFailures = [];
|
||||
this.trackFailures(api);
|
||||
|
||||
this.dirtyStates = {};
|
||||
this.watchFiles();
|
||||
this.rerunAll();
|
||||
}
|
||||
|
||||
watchFiles() {
|
||||
chokidar.watch(['**/*'], {
|
||||
cwd: this.globs.cwd,
|
||||
ignored: getChokidarIgnorePatterns(this.globs),
|
||||
ignoreInitial: true
|
||||
}).on('all', (event, path) => {
|
||||
if (event === 'add' || event === 'change' || event === 'unlink') {
|
||||
debug('Detected %s of %s', event, path);
|
||||
this.dirtyStates[nodePath.join(this.globs.cwd, path)] = event;
|
||||
this.debouncer.debounce();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
trackTestDependencies(api) {
|
||||
api.on('run', plan => {
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (evt.type !== 'dependencies') {
|
||||
return;
|
||||
}
|
||||
|
||||
const dependencies = evt.dependencies.filter(filePath => {
|
||||
const {isIgnoredByWatcher} = classify(filePath, this.globs);
|
||||
return !isIgnoredByWatcher;
|
||||
});
|
||||
this.updateTestDependencies(evt.testFile, dependencies);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
updateTestDependencies(file, dependencies) {
|
||||
// Ensure the rewritten test file path is included in the dependencies,
|
||||
// since changes to non-rewritten paths are ignored.
|
||||
for (const {main} of this.providers) {
|
||||
const rewritten = main.resolveTestFile(file);
|
||||
if (!dependencies.includes(rewritten)) {
|
||||
dependencies = [rewritten, ...dependencies];
|
||||
}
|
||||
}
|
||||
|
||||
if (dependencies.length === 0) {
|
||||
this.testDependencies = this.testDependencies.filter(dep => dep.file !== file);
|
||||
return;
|
||||
}
|
||||
|
||||
const isUpdate = this.testDependencies.some(dep => {
|
||||
if (dep.file !== file) {
|
||||
return false;
|
||||
}
|
||||
|
||||
dep.dependencies = dependencies;
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
if (!isUpdate) {
|
||||
this.testDependencies.push(new TestDependency(file, dependencies));
|
||||
}
|
||||
}
|
||||
|
||||
trackTouchedFiles(api) {
|
||||
api.on('run', plan => {
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (evt.type !== 'touched-files') {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const file of evt.files) {
|
||||
this.touchedFiles.add(file);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
trackExclusivity(api) {
|
||||
api.on('run', plan => {
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (evt.type !== 'worker-finished') {
|
||||
return;
|
||||
}
|
||||
|
||||
const fileStats = plan.status.stats.byFile.get(evt.testFile);
|
||||
const ranExclusiveTests = fileStats.selectedTests > 0 && fileStats.declaredTests > fileStats.selectedTests;
|
||||
this.updateExclusivity(evt.testFile, ranExclusiveTests);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
updateExclusivity(file, hasExclusiveTests) {
|
||||
const index = this.filesWithExclusiveTests.indexOf(file);
|
||||
|
||||
if (hasExclusiveTests && index === -1) {
|
||||
this.filesWithExclusiveTests.push(file);
|
||||
} else if (!hasExclusiveTests && index !== -1) {
|
||||
this.filesWithExclusiveTests.splice(index, 1);
|
||||
}
|
||||
}
|
||||
|
||||
trackFailures(api) {
|
||||
api.on('run', plan => {
|
||||
this.pruneFailures(plan.files);
|
||||
|
||||
const currentVector = this.runVector;
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (!evt.testFile) {
|
||||
return;
|
||||
}
|
||||
|
||||
switch (evt.type) {
|
||||
case 'hook-failed':
|
||||
case 'internal-error':
|
||||
case 'test-failed':
|
||||
case 'uncaught-exception':
|
||||
case 'unhandled-rejection':
|
||||
case 'worker-failed':
|
||||
this.countFailure(evt.testFile, currentVector);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
pruneFailures(files) {
|
||||
const toPrune = new Set(files);
|
||||
this.filesWithFailures = this.filesWithFailures.filter(state => !toPrune.has(state.file));
|
||||
}
|
||||
|
||||
countFailure(file, vector) {
|
||||
const isUpdate = this.filesWithFailures.some(state => {
|
||||
if (state.file !== file) {
|
||||
return false;
|
||||
}
|
||||
|
||||
state.count++;
|
||||
return true;
|
||||
});
|
||||
|
||||
if (!isUpdate) {
|
||||
this.filesWithFailures.push({
|
||||
file,
|
||||
vector,
|
||||
count: 1
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
sumPreviousFailures(beforeVector) {
|
||||
let total = 0;
|
||||
|
||||
for (const state of this.filesWithFailures) {
|
||||
if (state.vector < beforeVector) {
|
||||
total += state.count;
|
||||
}
|
||||
}
|
||||
|
||||
return total;
|
||||
}
|
||||
|
||||
cleanUnlinkedTests(unlinkedTests) {
|
||||
for (const testFile of unlinkedTests) {
|
||||
this.updateTestDependencies(testFile, []);
|
||||
this.updateExclusivity(testFile, false);
|
||||
this.pruneFailures([testFile]);
|
||||
}
|
||||
}
|
||||
|
||||
// Watch stdin for interactive watch-mode commands:
//   'r' / 'rs' — rerun all tests
//   'u'        — rerun the previous file selection, updating snapshots
// Any other input is ignored.
observeStdin(stdin) {
	stdin.resume();
	stdin.setEncoding('utf8');

	stdin.on('data', async data => {
		data = data.trim().toLowerCase();
		if (data !== 'r' && data !== 'rs' && data !== 'u') {
			return;
		}

		// Cancel the debouncer, it might rerun specific tests whereas *all* tests
		// need to be rerun
		this.debouncer.cancel();
		await this.busy;
		// Cancel the debouncer again, it might have restarted while waiting for
		// the busy promise to fulfil
		this.debouncer.cancel();
		// Keep the current log visible for manually triggered runs.
		this.clearLogOnNextRun = false;
		if (data === 'u') {
			this.updatePreviousSnapshots();
		} else {
			this.rerunAll();
		}
	});
}
|
||||
|
||||
// Rerun the entire suite, discarding any pending per-file dirty state so the
// next change-detection pass starts fresh.
rerunAll() {
	this.dirtyStates = {};
	this.run();
}
|
||||
|
||||
// Rerun the files from the previous run with snapshot updating enabled.
// NOTE(review): the second argument to run() presumably toggles snapshot
// updates — confirm against the run() implementation.
updatePreviousSnapshots() {
	this.dirtyStates = {};
	this.run(this.previousFiles, true);
}
|
||||
|
||||
// Decide what to rerun after file-system changes have been collected in
// `dirtyStates` (path → change type). Pipeline: filter out self-inflicted and
// provider-ignored changes, classify the rest as tests vs. helpers/sources,
// clean up deleted tests, then rerun the narrowest set of tests that covers
// the changes — or everything when a change cannot be traced.
runAfterChanges() {
	// Take ownership of the accumulated dirty state and reset it immediately so
	// changes arriving during this pass are queued for the next one.
	const {dirtyStates} = this;
	this.dirtyStates = {};

	// Ignore files AVA itself touched (e.g. snapshot writes) — each is only
	// excused once.
	let dirtyPaths = Object.keys(dirtyStates).filter(path => {
		if (this.touchedFiles.has(path)) {
			debug('Ignoring known touched file %s', path);
			this.touchedFiles.delete(path);
			return false;
		}

		return true;
	});

	// Let each provider (e.g. compiler integrations) veto changes it knows are
	// irrelevant, such as its own build output.
	for (const {main} of this.providers) {
		dirtyPaths = dirtyPaths.filter(path => {
			if (main.ignoreChange(path)) {
				debug('Ignoring changed file %s', path);
				return false;
			}

			return true;
		});
	}

	// Split the remaining changes into test files and helper/source files.
	const dirtyHelpersAndSources = [];
	const dirtyTests = [];
	for (const filePath of dirtyPaths) {
		const {isIgnoredByWatcher, isTest} = classify(filePath, this.globs);
		if (!isIgnoredByWatcher) {
			if (isTest) {
				dirtyTests.push(filePath);
			} else {
				dirtyHelpersAndSources.push(filePath);
			}
		}
	}

	const addedOrChangedTests = dirtyTests.filter(path => dirtyStates[path] !== 'unlink');
	const unlinkedTests = diff(dirtyTests, addedOrChangedTests);

	this.cleanUnlinkedTests(unlinkedTests);

	// No need to rerun tests if the only change is that tests were deleted
	if (unlinkedTests.length === dirtyPaths.length) {
		return;
	}

	if (dirtyHelpersAndSources.length === 0) {
		// Run any new or changed tests
		this.run(addedOrChangedTests);
		return;
	}

	// Try to find tests that depend on the changed source files
	const testsByHelpersOrSource = dirtyHelpersAndSources.map(path => {
		return this.testDependencies.filter(dep => dep.contains(path)).map(dep => {
			debug('%s is a dependency of %s', path, dep.file);
			return dep.file;
		});
	}, this).filter(tests => tests.length > 0);

	// Rerun all tests if source files were changed that could not be traced to
	// specific tests
	if (testsByHelpersOrSource.length !== dirtyHelpersAndSources.length) {
		debug('Files remain that cannot be traced to specific tests: %O', dirtyHelpersAndSources);
		debug('Rerunning all tests');
		this.run();
		return;
	}

	// Run all affected tests
	this.run([...new Set(addedOrChangedTests.concat(flatten(testsByHelpersOrSource)))]);
}
|
||||
}
|
||||
|
||||
module.exports = Watcher;
|
||||
47
node_modules/ava/lib/worker/dependency-tracker.js
generated
vendored
Normal file
47
node_modules/ava/lib/worker/dependency-tracker.js
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
/* eslint-disable node/no-deprecated-api */
|
||||
'use strict';
|
||||
const ipc = require('./ipc');
|
||||
|
||||
const seenDependencies = new Set();
|
||||
let newDependencies = [];
|
||||
// Send the batch of newly seen dependencies to the main process, then reset
// the batch. Does nothing when the batch is empty.
function flush() {
	if (newDependencies.length > 0) {
		ipc.send({type: 'dependencies', dependencies: newDependencies});
		newDependencies = [];
	}
}

exports.flush = flush;
||||
|
||||
// Record a freshly loaded file as a dependency. Each file is reported at most
// once; the first entry of a new batch schedules a flush on the next tick so
// multiple loads in the same tick are sent together.
function track(filename) {
	if (seenDependencies.has(filename)) {
		return;
	}

	seenDependencies.add(filename);

	if (newDependencies.length === 0) {
		process.nextTick(flush);
	}

	newDependencies.push(filename);
}

exports.track = track;
|
||||
|
||||
// Wrap every registered `require.extensions` handler so each file loaded from
// now on — except the test file itself — is tracked as a dependency before
// the original handler runs.
function install(testPath) {
	for (const [ext, originalHandler] of Object.entries(require.extensions)) {
		require.extensions[ext] = (module, filename) => {
			if (filename !== testPath) {
				track(filename);
			}

			originalHandler(module, filename);
		};
	}
}

exports.install = install;
|
||||
18
node_modules/ava/lib/worker/ensure-forked.js
generated
vendored
Normal file
18
node_modules/ava/lib/worker/ensure-forked.js
generated
vendored
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
'use strict';
const path = require('path');
const chalk = require('chalk'); // Use default Chalk instance.

// Check if the test is being run without AVA cli
const isForked = typeof process.send === 'function';
if (!isForked) {
	// Not forked: the file was executed directly (`node test.js`) or the 'ava'
	// module was imported outside a worker. Point the user at the CLI when an
	// entry script exists, otherwise throw.
	if (process.argv[1]) {
		const fp = path.relative('.', process.argv[1]);

		console.log();
		console.error(`Test files must be run with the AVA CLI:\n\n  ${chalk.grey.dim('$')} ${chalk.cyan('ava ' + fp)}\n`);

		process.exit(1); // eslint-disable-line unicorn/no-process-exit
	} else {
		throw new Error('The ’ava’ module can only be imported in test files');
	}
}
|
||||
56
node_modules/ava/lib/worker/ipc.js
generated
vendored
Normal file
56
node_modules/ava/lib/worker/ipc.js
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
'use strict';
const Emittery = require('emittery');

// Typed message channel between this worker process and the main AVA process.
// Incoming `process` messages are demultiplexed onto an Emittery instance;
// outgoing messages are wrapped in an `{ava: …}` envelope.
const emitter = new Emittery();
process.on('message', message => {
	// Ignore messages that are not AVA's own.
	if (!message.ava) {
		return;
	}

	switch (message.ava.type) {
		case 'options':
			emitter.emit('options', message.ava.options);
			break;
		case 'peer-failed':
			emitter.emit('peerFailed');
			break;
		case 'pong':
			emitter.emit('pong');
			break;
		default:
			break;
	}
});

// Promises that fulfil when the corresponding message first arrives.
exports.options = emitter.once('options');
exports.peerFailed = emitter.once('peerFailed');

// Send a message to the main process, unless the IPC channel has closed.
function send(evt) {
	if (process.connected) {
		process.send({ava: evt});
	}
}

exports.send = send;

// Allow the process to exit even while the IPC channel is open.
function unref() {
	process.channel.unref();
}

exports.unref = unref;

let pendingPings = Promise.resolve();
// Ensure all previously sent messages have been received by the main process:
// a ping/pong round-trip implies everything sent before the ping has arrived.
// Pings are chained so concurrent flushes settle in order; the channel is
// unreferenced again only once this flush is the most recent one.
async function flush() {
	process.channel.ref();
	const promise = pendingPings.then(async () => { // eslint-disable-line promise/prefer-await-to-then
		send({type: 'ping'});
		await emitter.once('pong');
		if (promise === pendingPings) {
			unref();
		}
	});
	pendingPings = promise;
	await promise;
}

exports.flush = flush;
|
||||
90
node_modules/ava/lib/worker/line-numbers.js
generated
vendored
Normal file
90
node_modules/ava/lib/worker/line-numbers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
// Parse `file` and collect the source location of every call expression,
// sorted by source order (line, then column). Modules are loaded lazily so
// the cost is only paid when line-number selection is actually used.
function parse(file) {
	const fs = require('fs');
	const acorn = require('acorn');
	const walk = require('acorn-walk');

	const source = fs.readFileSync(file, 'utf8');
	const ast = acorn.parse(source, {
		ecmaVersion: 11,
		locations: true
	});

	const locations = [];
	walk.simple(ast, {
		CallExpression({loc}) {
			locations.push(loc);
		}
	});

	// Walking is depth-first, but we want to sort these breadth-first.
	const bySourceOrder = (a, b) =>
		a.start.line === b.start.line ?
			a.start.column - b.start.column :
			a.start.line - b.start.line;
	locations.sort(bySourceOrder);

	return locations;
}
|
||||
|
||||
// Find the call expression that declares the test at `declaration`
// ({line, column}): the *last* location (locations are in source order) whose
// span covers the declaration position. Returns undefined when none does.
function findTest(locations, declaration) {
	let match;

	for (const loc of locations) {
		const startsAtOrBefore =
			loc.start.line < declaration.line ||
			(loc.start.line === declaration.line && loc.start.column <= declaration.column);
		const endsAtOrAfter =
			loc.end.line > declaration.line ||
			(loc.end.line === declaration.line && loc.end.column >= declaration.column);

		if (startsAtOrBefore && endsAtOrAfter) {
			match = loc;
		}
	}

	return match;
}
|
||||
|
||||
const range = (start, end) => new Array(end - start + 1).fill(start).map((element, index) => element + index);
|
||||
|
||||
// Build a predicate that decides, when called from a test declaration inside
// `file`, whether that test falls on one of the selected line numbers.
// Returns `undefined` when no line numbers were given, so callers can skip
// selection entirely.
module.exports = ({file, lineNumbers = []}) => {
	if (lineNumbers.length === 0) {
		return undefined;
	}

	// Avoid loading these until we actually need to select tests by line number.
	const callsites = require('callsites');
	const sourceMapSupport = require('source-map-support');

	const locations = parse(file);
	const selected = new Set(lineNumbers);

	return () => {
		// Assume this is called from a test declaration, which is located in the file.
		// If not… don't select the test!
		const callSite = callsites().find(callSite => callSite.getFileName() === file);
		if (!callSite) {
			return false;
		}

		// FIXME: This assumes the callSite hasn't already been adjusted. It's likely
		// that if `source-map-support/register` has been loaded, this would result
		// in the wrong location.
		const sourceCallSite = sourceMapSupport.wrapCallSite(callSite);
		const start = {
			line: sourceCallSite.getLineNumber(),
			column: sourceCallSite.getColumnNumber() - 1 // Use 0-indexed columns.
		};

		// The innermost call spanning the declaration is the test declaration
		// itself; select the test if any of its lines was requested.
		const test = findTest(locations, start);
		if (!test) {
			return false;
		}

		return range(test.start.line, test.end.line).some(line => selected.has(line));
	};
};
|
||||
21
node_modules/ava/lib/worker/main.js
generated
vendored
Normal file
21
node_modules/ava/lib/worker/main.js
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
'use strict';
const runner = require('./subprocess').getRunner();

// Build the CommonJS export: a callable `test` function that forwards to the
// runner chain and also carries all chain properties copied onto it.
const makeCjsExport = () => {
	function test(...args) {
		return runner.chain(...args);
	}

	return Object.assign(test, runner.chain);
};

// Support CommonJS modules by exporting a test function that can be fully
// chained. Also support ES module loaders by exporting __esModule and a
// default. Support `import * as ava from 'ava'` use cases by exporting a
// `test` member. Do all this whilst preventing `test.test.test() or
// `test.default.test()` chains, though in CommonJS `test.test()` is
// unavoidable.
module.exports = Object.assign(makeCjsExport(), {
	__esModule: true,
	default: runner.chain
});
|
||||
17
node_modules/ava/lib/worker/options.js
generated
vendored
Normal file
17
node_modules/ava/lib/worker/options.js
generated
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
'use strict';
|
||||
let options = null;
|
||||
exports.get = () => {
|
||||
if (!options) {
|
||||
throw new Error('Options have not yet been set');
|
||||
}
|
||||
|
||||
return options;
|
||||
};
|
||||
|
||||
exports.set = newOptions => {
|
||||
if (options) {
|
||||
throw new Error('Options have already been set');
|
||||
}
|
||||
|
||||
options = newOptions;
|
||||
};
|
||||
235
node_modules/ava/lib/worker/subprocess.js
generated
vendored
Normal file
235
node_modules/ava/lib/worker/subprocess.js
generated
vendored
Normal file
|
|
@ -0,0 +1,235 @@
|
|||
'use strict';
|
||||
const {pathToFileURL} = require('url');
|
||||
const currentlyUnhandled = require('currently-unhandled')();
|
||||
|
||||
require('./ensure-forked'); // eslint-disable-line import/no-unassigned-import
|
||||
|
||||
const ipc = require('./ipc');
|
||||
|
||||
// Feature-detect ES module support by dynamically importing an empty inline
// module; resolves `true` when the import succeeds, `false` otherwise.
const supportsESM = async () => {
	try {
		await import('data:text/javascript,'); // eslint-disable-line node/no-unsupported-features/es-syntax
		return true;
	} catch {
		return false;
	}
};
|
||||
|
||||
// Worker bootstrap: announce readiness, wait for the run options from the
// main process, then wire up the runner, error reporting, providers and
// module loading before finally loading the test file itself.
ipc.send({type: 'ready-for-options'});
ipc.options.then(async options => {
	require('./options').set(options);
	require('../chalk').set(options.chalkOptions);

	// Recreate the console with color support when the parent enabled colors.
	if (options.chalkOptions.level > 0) {
		const {stdout, stderr} = process;
		global.console = Object.assign(global.console, new console.Console({stdout, stderr, colorMode: true}));
	}

	const nowAndTimers = require('../now-and-timers');
	const providerManager = require('../provider-manager');
	const Runner = require('../runner');
	const serializeError = require('../serialize-error');
	const dependencyTracking = require('./dependency-tracker');
	const lineNumberSelection = require('./line-numbers');

	// Exit with `code` (unless an exit code was already assigned), but only
	// after pending dependency reports and IPC messages have been delivered.
	async function exit(code) {
		if (!process.exitCode) {
			process.exitCode = code;
		}

		dependencyTracking.flush();
		await ipc.flush();
		process.exit(); // eslint-disable-line unicorn/no-process-exit
	}

	// TODO: Initialize providers here, then pass to lineNumberSelection() so they
	// can be used to parse the test file.
	let checkSelectedByLineNumbers;
	try {
		checkSelectedByLineNumbers = lineNumberSelection({
			file: options.file,
			lineNumbers: options.lineNumbers
		});
	} catch (error) {
		// Selection failure is reported but does not abort the run; no tests are
		// selected by line number in that case.
		ipc.send({type: 'line-number-selection-error', err: serializeError('Line number selection error', false, error, options.file)});
		checkSelectedByLineNumbers = () => false;
	}

	const runner = new Runner({
		checkSelectedByLineNumbers,
		experiments: options.experiments,
		failFast: options.failFast,
		failWithoutAssertions: options.failWithoutAssertions,
		file: options.file,
		match: options.match,
		projectDir: options.projectDir,
		recordNewSnapshots: options.recordNewSnapshots,
		runOnlyExclusive: options.runOnlyExclusive,
		serial: options.serial,
		snapshotDir: options.snapshotDir,
		updateSnapshots: options.updateSnapshots
	});

	// Interrupt this worker when a peer worker failed (fail-fast).
	ipc.peerFailed.then(() => { // eslint-disable-line promise/prefer-await-to-then
		runner.interrupt();
	});

	// Rejections the runner attributes to a test are not reported again as
	// unhandled rejections at the end of the run.
	const attributedRejections = new Set();
	process.on('unhandledRejection', (reason, promise) => {
		if (runner.attributeLeakedError(reason)) {
			attributedRejections.add(promise);
		}
	});

	runner.on('dependency', dependencyTracking.track);
	runner.on('stateChange', state => ipc.send(state));

	runner.on('error', error => {
		ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
		exit(1);
	});

	runner.on('finish', () => {
		try {
			const touchedFiles = runner.saveSnapshotState();
			if (touchedFiles) {
				ipc.send({type: 'touched-files', files: touchedFiles});
			}
		} catch (error) {
			ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
			exit(1);
			return;
		}

		// Report any rejections that remained unhandled and were not attributed
		// to a test, then exit cleanly.
		nowAndTimers.setImmediate(() => {
			currentlyUnhandled()
				.filter(rejection => !attributedRejections.has(rejection.promise))
				.forEach(rejection => {
					ipc.send({type: 'unhandled-rejection', err: serializeError('Unhandled rejection', true, rejection.reason, runner.file)});
				});

			exit(0);
		});
	});

	process.on('uncaughtException', error => {
		if (runner.attributeLeakedError(error)) {
			return;
		}

		ipc.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
		exit(1);
	});

	// Flag whether the test file actually imported AVA (see below).
	let accessedRunner = false;
	exports.getRunner = () => {
		accessedRunner = true;
		return runner;
	};

	// Store value to prevent required modules from modifying it.
	const testPath = options.file;

	// Install basic source map support.
	const sourceMapSupport = require('source-map-support');
	sourceMapSupport.install({
		environment: 'node',
		handleUncaughtExceptions: false
	});

	const extensionsToLoadAsModules = Object.entries(options.moduleTypes)
		.filter(([, type]) => type === 'module')
		.map(([extension]) => extension);

	// Install before processing options.require, so if helpers are added to the
	// require configuration the *compiled* helper will be loaded.
	const {projectDir, providerStates = []} = options;
	const providers = providerStates.map(({type, state}) => {
		if (type === 'babel') {
			const provider = providerManager.babel(projectDir).worker({extensionsToLoadAsModules, state});
			runner.powerAssert = provider.powerAssert;
			return provider;
		}

		if (type === 'typescript') {
			return providerManager.typescript(projectDir).worker({extensionsToLoadAsModules, state});
		}

		return null;
	}).filter(provider => provider !== null);

	let requireFn = require;
	let isESMSupported;
	// Load `ref` as an ES module, through a provider, or via require — in that
	// order of precedence.
	const load = async ref => {
		for (const extension of extensionsToLoadAsModules) {
			if (ref.endsWith(`.${extension}`)) {
				if (typeof isESMSupported !== 'boolean') {
					// Lazily determine support since this prints an experimental warning.
					// eslint-disable-next-line no-await-in-loop
					isESMSupported = await supportsESM();
				}

				if (isESMSupported) {
					return import(pathToFileURL(ref)); // eslint-disable-line node/no-unsupported-features/es-syntax
				}

				ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, new Error('ECMAScript Modules are not supported in this Node.js version.'))});
				exit(1);
				return;
			}
		}

		for (const provider of providers) {
			if (provider.canLoad(ref)) {
				return provider.load(ref, {requireFn});
			}
		}

		return requireFn(ref);
	};

	try {
		for await (const ref of (options.require || [])) {
			const mod = await load(ref);

			// Adopt the `esm` package's loader when a required module exposes one;
			// other errors while probing are deliberately ignored.
			try {
				if (Reflect.has(mod, Symbol.for('esm:package'))) {
					requireFn = mod(module);
				}
			} catch (_) {}
		}

		// Install dependency tracker after the require configuration has been evaluated
		// to make sure we also track dependencies with custom require hooks
		dependencyTracking.install(testPath);

		if (options.debug) {
			require('inspector').open(options.debug.port, options.debug.host, true); // eslint-disable-line node/no-unsupported-features/node-builtins
			if (options.debug.break) {
				debugger; // eslint-disable-line no-debugger
			}
		}

		await load(testPath);

		if (accessedRunner) {
			// Unreference the IPC channel if the test file required AVA. This stops it
			// from keeping the event loop busy, which means the `beforeExit` event can be
			// used to detect when tests stall.
			ipc.unref();
		} else {
			ipc.send({type: 'missing-ava-import'});
			exit(1);
		}
	} catch (error) {
		ipc.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
		exit(1);
	}
}).catch(error => {
	// There shouldn't be any errors, but if there are we may not have managed
	// to bootstrap enough code to serialize them. Re-throw and let the process
	// crash.
	setImmediate(() => {
		throw error;
	});
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue