Upgrade Ava to v4
parent 9a40cc5274
commit ce89f1b611
1153 changed files with 27264 additions and 95308 deletions
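Most of the churn in the vendored AVA file below follows a single pattern: AVA 4 ships as native ES modules, so CommonJS requires become imports, Node builtins gain the node: prefix, and relative specifiers spell out their .js extension. A minimal sketch of that pattern, illustrative only and not taken verbatim from the commit:

// AVA 3 vendored code used CommonJS:
//   const fs = require('fs');
//   const globs = require('./globs');
//
// AVA 4 is ESM: builtins take the node: prefix, and relative specifiers
// must name their extension, e.g. `import * as globs from './globs.js';`.
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';

console.log(os.platform(), path.sep, fs.constants.F_OK);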
node_modules/ava/lib/api.js (157 changes; generated, vendored)
@@ -1,23 +1,25 @@
-'use strict';
-const fs = require('fs');
-const path = require('path');
-const os = require('os');
-const commonPathPrefix = require('common-path-prefix');
-const resolveCwd = require('resolve-cwd');
-const debounce = require('lodash/debounce');
-const arrify = require('arrify');
-const ms = require('ms');
-const chunkd = require('chunkd');
-const Emittery = require('emittery');
-const pMap = require('p-map');
-const tempDir = require('temp-dir');
-const globs = require('./globs');
-const isCi = require('./is-ci');
-const RunStatus = require('./run-status');
-const fork = require('./fork');
-const serializeError = require('./serialize-error');
-const {getApplicableLineNumbers} = require('./line-numbers');
-const sharedWorkers = require('./plugin-support/shared-workers');
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+import process from 'node:process';
+
+import arrify from 'arrify';
+import chunkd from 'chunkd';
+import commonPathPrefix from 'common-path-prefix';
+import Emittery from 'emittery';
+import ms from 'ms';
+import pMap from 'p-map';
+import resolveCwd from 'resolve-cwd';
+import tempDir from 'temp-dir';
+
+import fork from './fork.js';
+import * as globs from './globs.js';
+import isCi from './is-ci.js';
+import {getApplicableLineNumbers} from './line-numbers.js';
+import {observeWorkerProcess} from './plugin-support/shared-workers.js';
+import RunStatus from './run-status.js';
+import scheduler from './scheduler.js';
+import serializeError from './serialize-error.js';
 
 function resolveModules(modules) {
 	return arrify(modules).map(name => {
@@ -40,7 +42,40 @@ function getFilePathPrefix(files) {
 	return commonPathPrefix(files);
 }
 
-class Api extends Emittery {
+class TimeoutTrigger {
+	constructor(fn, waitMs = 0) {
+		this.fn = fn.bind(null);
+		this.ignoreUntil = 0;
+		this.waitMs = waitMs;
+		this.timer = undefined;
+	}
+
+	debounce() {
+		if (this.timer === undefined) {
+			this.timer = setTimeout(() => this.trigger(), this.waitMs);
+		} else {
+			this.timer.refresh();
+		}
+	}
+
+	discard() {
+		// N.B. this.timer is not cleared so if debounce() is called after it will
+		// not run again.
+		clearTimeout(this.timer);
+	}
+
+	ignoreFor(periodMs) {
+		this.ignoreUntil = Math.max(this.ignoreUntil, Date.now() + periodMs);
+	}
+
+	trigger() {
+		if (Date.now() >= this.ignoreUntil) {
+			this.fn();
+		}
+	}
+}
+
+export default class Api extends Emittery {
 	constructor(options) {
 		super();
 
@@ -55,7 +90,7 @@ class Api extends Emittery {
 		}
 	}
 
-	async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) {
+	async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) { // eslint-disable-line complexity
 		let setupOrGlobError;
 
 		const apiOptions = this.options;
@@ -70,11 +105,11 @@ class Api extends Emittery {
 		let bailed = false;
 		const pendingWorkers = new Set();
 		const timedOutWorkerFiles = new Set();
-		let restartTimer;
+		let timeoutTrigger;
 		if (apiOptions.timeout && !apiOptions.debug) {
 			const timeout = ms(apiOptions.timeout);
 
-			restartTimer = debounce(() => {
+			timeoutTrigger = new TimeoutTrigger(() => {
 				// If failFast is active, prevent new test files from running after
 				// the current ones are exited.
 				if (failFast) {
@@ -89,7 +124,7 @@ class Api extends Emittery {
 				}
 			}, timeout);
 		} else {
-			restartTimer = Object.assign(() => {}, {cancel() {}});
+			timeoutTrigger = new TimeoutTrigger(() => {});
 		}
 
 		this._interruptHandler = () => {
@@ -102,7 +137,7 @@ class Api extends Emittery {
 			bailed = true;
 
 			// Make sure we don't run the timeout handler
-			restartTimer.cancel();
+			timeoutTrigger.discard();
 
 			runStatus.emitStateChange({type: 'interrupt'});
 
@@ -111,6 +146,8 @@ class Api extends Emittery {
 			}
 		};
 
+		const {providers = []} = this.options;
+
 		let testFiles;
 		try {
 			testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
@@ -118,7 +155,8 @@ class Api extends Emittery {
 				selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
 					cwd: this.options.projectDir,
 					filter: filter.map(({pattern}) => pattern),
-					testFiles
+					providers,
+					testFiles,
 				});
 			}
 		} catch (error) {
@@ -126,6 +164,13 @@ class Api extends Emittery {
 			setupOrGlobError = error;
 		}
 
+		const selectionInsights = {
+			filter,
+			ignoredFilterPatternFiles: selectedFiles.ignoredFilterPatternFiles || [],
+			testFileCount: testFiles.length,
+			selectionCount: selectedFiles.length,
+		};
+
 		try {
 			if (this.options.parallelRuns) {
 				const {currentIndex, totalRuns} = this.options.parallelRuns;
@@ -137,11 +182,13 @@ class Api extends Emittery {
 
 				const currentFileCount = selectedFiles.length;
 
-				runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns});
+				runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns}, selectionInsights);
 			} else {
-				runStatus = new RunStatus(selectedFiles.length, null);
+				runStatus = new RunStatus(selectedFiles.length, null, selectionInsights);
 			}
 
+			selectedFiles = scheduler.failingTestsFirst(selectedFiles, this._getLocalCacheDir(), this.options.cacheEnabled);
+
 			const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
 
 			await this.emit('run', {
@@ -155,7 +202,7 @@ class Api extends Emittery {
 				previousFailures: runtimeOptions.previousFailures || 0,
 				runOnlyExclusive: runtimeOptions.runOnlyExclusive === true,
 				runVector: runtimeOptions.runVector || 0,
-				status: runStatus
+				status: runStatus,
 			});
 
 			if (setupOrGlobError) {
@@ -169,9 +216,9 @@ class Api extends Emittery {
 
 			runStatus.on('stateChange', record => {
 				if (record.testFile && !timedOutWorkerFiles.has(record.testFile)) {
-					// Restart the timer whenever there is activity from workers that
+					// Debounce the timer whenever there is activity from workers that
 					// haven't already timed out.
-					restartTimer();
+					timeoutTrigger.debounce();
 				}
 
 				if (failFast && (record.type === 'hook-failed' || record.type === 'test-failed' || record.type === 'worker-failed')) {
@@ -185,14 +232,16 @@ class Api extends Emittery {
 				}
 			});
 
-			const {providers = []} = this.options;
-			const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
+			const providerStates = [];
+			await Promise.all(providers.map(async ({type, main}) => {
 				const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
-				return state === null ? null : {type, state};
-			}))).filter(state => state !== null);
+				if (state !== null) {
+					providerStates.push({type, state});
+				}
+			}));
 
 			// Resolve the correct concurrency value.
-			let concurrency = Math.min(os.cpus().length, isCi ? 2 : Infinity);
+			let concurrency = Math.min(os.cpus().length, isCi ? 2 : Number.POSITIVE_INFINITY);
 			if (apiOptions.concurrency > 0) {
 				concurrency = apiOptions.concurrency;
 			}
@@ -212,13 +261,15 @@ class Api extends Emittery {
 				}
 
 				const lineNumbers = getApplicableLineNumbers(globs.normalizeFileForMatching(apiOptions.projectDir, file), filter);
+				// Removing `providers` field because they cannot be transfered to the worker threads.
+				const {providers, ...forkOptions} = apiOptions;
 				const options = {
-					...apiOptions,
+					...forkOptions,
 					providerStates,
 					lineNumbers,
 					recordNewSnapshots: !isCi,
 					// If we're looking for matches, run every single test process in exclusive-only mode
-					runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true
+					runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true,
 				};
 
 				if (runtimeOptions.updateSnapshots) {
@@ -227,42 +278,52 @@ class Api extends Emittery {
 				}
 
 				const worker = fork(file, options, apiOptions.nodeArguments);
+				worker.onStateChange(data => {
+					if (data.type === 'test-timeout-configured' && !apiOptions.debug) {
+						timeoutTrigger.ignoreFor(data.period);
+					}
+				});
 				runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
-				deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
+				deregisteredSharedWorkers.push(observeWorkerProcess(worker, runStatus));
 
 				pendingWorkers.add(worker);
 				worker.promise.then(() => {
 					pendingWorkers.delete(worker);
 				});
-				restartTimer();
+				timeoutTrigger.debounce();
 
 				await worker.promise;
 			}, {concurrency, stopOnError: false});
 
 			// Allow shared workers to clean up before the run ends.
 			await Promise.all(deregisteredSharedWorkers);
+			scheduler.storeFailedTestFiles(runStatus, this.options.cacheEnabled === false ? null : this._createCacheDir());
 		} catch (error) {
 			if (error && error.name === 'AggregateError') {
-				for (const err of error) {
-					runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, err)});
+				for (const error_ of error.errors) {
+					runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error_)});
 				}
 			} else {
 				runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error)});
 			}
 		}
 
-		restartTimer.cancel();
+		timeoutTrigger.discard();
 		return runStatus;
 	}
 
+	_getLocalCacheDir() {
+		return path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
+	}
+
 	_createCacheDir() {
 		if (this._cacheDir) {
 			return this._cacheDir;
 		}
 
-		const cacheDir = this.options.cacheEnabled === false ?
-			fs.mkdtempSync(`${tempDir}${path.sep}`) :
-			path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
+		const cacheDir = this.options.cacheEnabled === false
+			? fs.mkdtempSync(`${tempDir}${path.sep}`)
+			: this._getLocalCacheDir();
 
 		// Ensure cacheDir exists
 		fs.mkdirSync(cacheDir, {recursive: true});
@@ -272,5 +333,3 @@ class Api extends Emittery {
 		return cacheDir;
 	}
 }
-
-module.exports = Api;
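The other substantive change in this file swaps the lodash/debounce-based restartTimer for the TimeoutTrigger class added above. A rough standalone sketch of how run() drives it; the class body is copied from the diff, while the small harness at the bottom is invented for illustration:

// TimeoutTrigger, as added above: a refreshable inactivity timer that can be
// told to ignore firings for a period (used while a test's own timeout runs).
class TimeoutTrigger {
	constructor(fn, waitMs = 0) {
		this.fn = fn.bind(null);
		this.ignoreUntil = 0;
		this.waitMs = waitMs;
		this.timer = undefined;
	}

	debounce() {
		if (this.timer === undefined) {
			this.timer = setTimeout(() => this.trigger(), this.waitMs);
		} else {
			// Node's Timeout#refresh() restarts the countdown without reallocating.
			this.timer.refresh();
		}
	}

	discard() {
		clearTimeout(this.timer);
	}

	ignoreFor(periodMs) {
		this.ignoreUntil = Math.max(this.ignoreUntil, Date.now() + periodMs);
	}

	trigger() {
		if (Date.now() >= this.ignoreUntil) {
			this.fn();
		}
	}
}

// Invented harness mirroring how api.js uses the class: debounce on worker
// activity, ignore while a per-test timeout is configured, discard at the end.
const trigger = new TimeoutTrigger(() => console.log('no activity for 500ms'), 500);
trigger.debounce();                         // worker activity arms the countdown
setTimeout(() => trigger.debounce(), 300);  // more activity restarts it
setTimeout(() => trigger.discard(), 2000);  // end of run: nothing fires afterwards

Compared with the old debounce-plus-cancel pair, the ignoreFor() window (fed by the worker's test-timeout-configured events in the diff) keeps a long per-test timeout from being mistaken for a stalled run.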