Upgrade Ava to v4

This commit is contained in:
Henry Mercer 2022-02-01 18:01:11 +00:00
parent 9a40cc5274
commit ce89f1b611
1153 changed files with 27264 additions and 95308 deletions

11
node_modules/ava/cli.js generated vendored
View file

@@ -1,11 +0,0 @@
#!/usr/bin/env node
'use strict';
const debug = require('debug')('ava');
const importLocal = require('import-local');

// Prefer the project-local installation of AVA over this (possibly global) copy.
// importLocal() returns a truthy value when it hands control to the local copy.
if (!importLocal(__filename)) {
	require('./lib/cli').run();
} else {
	debug('Using local install of AVA');
}

4
node_modules/ava/entrypoints/cli.mjs generated vendored Executable file
View file

@@ -0,0 +1,4 @@
#!/usr/bin/env node
// AVA CLI entry point: load the command-line implementation and run it immediately.
import cli from '../lib/cli.js';

cli();

109
node_modules/ava/entrypoints/eslint-plugin-helper.cjs generated vendored Normal file
View file

@@ -0,0 +1,109 @@
'use strict';
const path = require('path');
const url = require('url');
const v8 = require('v8');
const {Worker} = require('worker_threads');
const {
	classify,
	hasExtension,
	isHelperish,
	matches,
	normalizeFileForMatching,
	normalizePatterns,
} = require('../lib/glob-helpers.cjs');

const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.

// Shared state for the lazily spawned worker thread (see resolveGlobsSync).
let data;
let sync;
let worker;

// Synchronously resolve AVA's test-file globs for `projectDir` by delegating the
// (asynchronous) configuration loading to a worker thread, then blocking this
// thread with Atomics.wait() until the worker publishes the serialized result
// into the shared buffer.
const resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
	if (worker === undefined) {
		// First call: set up the shared buffers and spawn the worker, handing the
		// request along as the worker's initial message.
		const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
		data = new Uint8Array(dataBuffer);
		const syncBuffer = new SharedArrayBuffer(4);
		sync = new Int32Array(syncBuffer);
		const filename = path.join(__dirname, '../lib/eslint-plugin-helper-worker.js');
		worker = new Worker(url.pathToFileURL(filename), {
			workerData: {
				dataBuffer,
				syncBuffer,
				firstMessage: {projectDir, overrideExtensions, overrideFiles},
			},
		});
		// Don't keep the host (ESLint) process alive just for this worker.
		worker.unref();
	} else {
		worker.postMessage({projectDir, overrideExtensions, overrideFiles});
	}

	// Block until the worker stores the payload byte length at sync[0], or give
	// up after 10 seconds so a dead worker cannot hang the process.
	const synchronize = Atomics.wait(sync, 0, 0, 10_000);
	if (synchronize === 'timed-out') {
		throw new Error('Timed out resolving AVA configuration');
	}

	// Read the byte length while resetting sync[0] to 0 for the next request.
	const byteLength = Atomics.exchange(sync, 0, 0);
	if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
		// The full buffer size acts as an "oversized payload" sentinel.
		throw new Error('Globs are over 100 KiB and cannot be resolved');
	}

	// The worker serializes either the globs object or an Error; rethrow errors here.
	const globsOrError = v8.deserialize(data.slice(0, byteLength));
	if (globsOrError instanceof Error) {
		throw globsOrError;
	}

	return globsOrError;
};
// Frozen helper objects, cached by overrides + project directory.
const helperCache = new Map();

/**
 * Create (or reuse) a classifier for `projectDir`. `overrides` may replace the
 * configured `extensions`, `files` and `helpers` globs.
 */
function load(projectDir, overrides) {
	const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
	const cached = helperCache.get(cacheKey);
	if (cached !== undefined) {
		return cached;
	}

	let helperPatterns = [];
	if (overrides && overrides.helpers !== undefined) {
		const {helpers} = overrides;
		if (!Array.isArray(helpers) || helpers.length === 0) {
			throw new Error('The helpers override must be an array containing glob patterns.');
		}

		helperPatterns = normalizePatterns(helpers);
	}

	const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);

	// Decide whether `file` is a test and/or a helper according to the globs.
	const classifyForESLint = file => {
		const {isTest} = classify(file, globs);
		if (isTest || !hasExtension(globs.extensions, file)) {
			return {isHelper: false, isTest};
		}

		const normalized = normalizeFileForMatching(projectDir, file);
		const isHelper = isHelperish(normalized) || (helperPatterns.length > 0 && matches(normalized, helperPatterns));
		return {isHelper, isTest};
	};

	const helper = Object.freeze({
		classifyFile: classifyForESLint,
		classifyImport(importPath) {
			// Paths that already carry a test-file extension can be classified as-is.
			if (hasExtension(globs.extensions, importPath)) {
				return classifyForESLint(importPath);
			}

			// Otherwise append the first configured extension; patterns are assumed
			// not to be biased towards any particular extension.
			return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
		},
	});
	helperCache.set(cacheKey, helper);
	return helper;
}

exports.load = load;

2
node_modules/ava/entrypoints/main.cjs generated vendored Normal file
View file

@@ -0,0 +1,2 @@
'use strict';
// CommonJS entry point: re-export the worker "main" implementation unchanged.
const workerMain = require('../lib/worker/main.cjs');
module.exports = workerMain;

1
node_modules/ava/entrypoints/main.mjs generated vendored Normal file
View file

@@ -0,0 +1 @@
// ESM entry point: forward the default export of the CommonJS worker "main" module.
export {default} from '../lib/worker/main.cjs';

2
node_modules/ava/entrypoints/plugin.cjs generated vendored Normal file
View file

@@ -0,0 +1,2 @@
'use strict';
// CommonJS entry point: re-export the worker plugin implementation unchanged.
const workerPlugin = require('../lib/worker/plugin.cjs');
module.exports = workerPlugin;

4
node_modules/ava/entrypoints/plugin.mjs generated vendored Normal file
View file

@@ -0,0 +1,4 @@
// ESM entry point: expose only the shared-worker registration API of the plugin.
import * as workerPlugin from '../lib/worker/plugin.cjs';

export const registerSharedWorker = workerPlugin.registerSharedWorker;

201
node_modules/ava/eslint-plugin-helper.js generated vendored
View file

@ -1,201 +0,0 @@
'use strict';
// Detect worker_threads support. In a worker-capable main thread globs are
// resolved via a worker; inside a worker (or when workers are unavailable)
// they are resolved directly from the AVA configuration.
let isMainThread = true;
let supportsWorkers = false;
try {
	({isMainThread} = require('worker_threads'));
	supportsWorkers = true;
} catch {}

const {classify, hasExtension, isHelperish, matches, normalizeFileForMatching, normalizeGlobs, normalizePatterns} = require('./lib/globs');

let resolveGlobs;
let resolveGlobsSync;

// In-process glob resolution, used when not running as the main thread of a
// worker-capable Node.js.
if (!supportsWorkers || !isMainThread) {
	const normalizeExtensions = require('./lib/extensions');
	const {loadConfig, loadConfigSync} = require('./lib/load-config');
	const providerManager = require('./lib/provider-manager');

	// Loaded configuration (and providers), cached per project directory.
	// NOTE(review): the sync path stores a plain {conf, providers} object while
	// the async path stores a promise for one — callers are expected not to mix
	// the two variants for the same projectDir; confirm against call sites.
	const configCache = new Map();

	// Instantiate the Babel / TypeScript providers declared in the configuration.
	const collectProviders = ({conf, projectDir}) => {
		const providers = [];
		if (Reflect.has(conf, 'babel')) {
			const {level, main} = providerManager.babel(projectDir);
			providers.push({
				level,
				main: main({config: conf.babel}),
				type: 'babel'
			});
		}
		if (Reflect.has(conf, 'typescript')) {
			const {level, main} = providerManager.typescript(projectDir);
			providers.push({
				level,
				main: main({config: conf.typescript}),
				type: 'typescript'
			});
		}
		return providers;
	};

	// Compute the glob object from the configuration, honoring the overrides.
	const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
		const extensions = overrideExtensions ?
			normalizeExtensions(overrideExtensions) :
			normalizeExtensions(conf.extensions, providers);
		return {
			cwd: projectDir,
			...normalizeGlobs({
				extensions,
				files: overrideFiles ? overrideFiles : conf.files,
				providers
			})
		};
	};

	// Synchronous variant: loads the configuration synchronously.
	resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
		if (!configCache.has(projectDir)) {
			const conf = loadConfigSync({resolveFrom: projectDir});
			const providers = collectProviders({conf, projectDir});
			configCache.set(projectDir, {conf, providers});
		}
		const {conf, providers} = configCache.get(projectDir);
		return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
	};

	// Asynchronous variant: caches the pending promise so concurrent callers
	// share a single configuration load.
	resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
		if (!configCache.has(projectDir)) {
			configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(conf => { // eslint-disable-line promise/prefer-await-to-then
				const providers = collectProviders({conf, projectDir});
				return {conf, providers};
			}));
		}
		const {conf, providers} = await configCache.get(projectDir);
		return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
	};
}
// Worker-based glob resolution: the main thread blocks on shared memory while a
// worker (this same file, re-entered off the main thread) loads the
// configuration and serializes the result back.
if (supportsWorkers) {
	const v8 = require('v8');

	const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
	// Upper bound on how long the main thread may block waiting for the worker.
	const RESOLVE_TIMEOUT_MS = 10000;

	if (isMainThread) {
		const {Worker} = require('worker_threads');
		let data;
		let sync;
		let worker;

		// Resolve globs by delegating to a worker thread (spawned on first use)
		// and waiting on sync[0] until the serialized result is in `data`.
		resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
			if (worker === undefined) {
				const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
				data = new Uint8Array(dataBuffer);
				const syncBuffer = new SharedArrayBuffer(4);
				sync = new Int32Array(syncBuffer);
				worker = new Worker(__filename, {
					workerData: {
						dataBuffer,
						syncBuffer,
						firstMessage: {projectDir, overrideExtensions, overrideFiles}
					}
				});
				// Don't keep the host process alive just for this worker.
				worker.unref();
			} else {
				worker.postMessage({projectDir, overrideExtensions, overrideFiles});
			}
			// Fix: bound the wait. Previously Atomics.wait() had no timeout, so a
			// crashed or stalled worker would hang the main thread forever.
			if (Atomics.wait(sync, 0, 0, RESOLVE_TIMEOUT_MS) === 'timed-out') {
				throw new Error('Timed out resolving AVA configuration');
			}
			// Read the payload byte length while resetting sync[0] for the next request.
			const byteLength = Atomics.exchange(sync, 0, 0);
			if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
				// The full buffer size acts as an "oversized payload" sentinel.
				throw new Error('Globs are over 100 KiB and cannot be resolved');
			}
			// The worker serializes either the globs object or an Error; rethrow errors.
			const globsOrError = v8.deserialize(data.slice(0, byteLength));
			if (globsOrError instanceof Error) {
				throw globsOrError;
			}
			return globsOrError;
		};
	} else {
		const {parentPort, workerData} = require('worker_threads');
		const data = new Uint8Array(workerData.dataBuffer);
		const sync = new Int32Array(workerData.syncBuffer);

		// Resolve globs for one request and publish the v8-serialized result (or
		// error) into the shared buffer, then wake the waiting main thread.
		const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
			let encoded;
			try {
				const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
				encoded = v8.serialize(globs);
			} catch (error) {
				encoded = v8.serialize(error);
			}
			// Store the payload length, or the buffer size as the oversized sentinel.
			const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
			Atomics.store(sync, 0, byteLength);
			Atomics.notify(sync, 0);
		};
		parentPort.on('message', handleMessage);
		handleMessage(workerData.firstMessage);
		delete workerData.firstMessage;
	}
}
// Frozen helper objects, cached by overrides + project directory.
const helperCache = new Map();

// Build (or reuse) a classifier for the given project directory. `overrides`
// may replace the configured `extensions`, `files` and `helpers` globs.
function load(projectDir, overrides) {
	const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
	if (helperCache.has(cacheKey)) {
		return helperCache.get(cacheKey);
	}

	let helperPatterns = [];
	if (overrides && overrides.helpers !== undefined) {
		if (!Array.isArray(overrides.helpers) || overrides.helpers.length === 0) {
			throw new Error('The helpers override must be an array containing glob patterns.');
		}
		helperPatterns = normalizePatterns(overrides.helpers);
	}

	const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);

	// Classify a file path as test and/or helper according to the project globs.
	const classifyForESLint = file => {
		const {isTest} = classify(file, globs);
		let isHelper = false;
		if (!isTest && hasExtension(globs.extensions, file)) {
			file = normalizeFileForMatching(projectDir, file);
			isHelper = isHelperish(file) || (helperPatterns.length > 0 && matches(file, helperPatterns));
		}
		return {isHelper, isTest};
	};

	const helper = Object.freeze({
		classifyFile: classifyForESLint,
		classifyImport: importPath => {
			if (hasExtension(globs.extensions, importPath)) {
				// The importPath has one of the test file extensions: we can classify
				// it directly.
				return classifyForESLint(importPath);
			}
			// Add the first extension. If multiple extensions are available, assume
			// patterns are not biased to any particular extension.
			return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
		}
	});
	helperCache.set(cacheKey, helper);
	return helper;
}
exports.load = load;

822
node_modules/ava/index.d.ts generated vendored
View file

@@ -1,822 +1,12 @@
/** An observable-like value AVA can wait on; may be returned from a test or hook implementation. */
export interface Subscribable {
	subscribe(observer: {
		/** Receives a failure from the underlying source. */
		error(err: any): void;
		/** Signals that the underlying source is done. */
		complete(): void;
	}): void;
}

import type {TestFn} from './types/test-fn';

/** Any newable (class) constructor. */
export type Constructor = (new (...args: any[]) => any);

/** Specify one or more expectations the thrown error must satisfy. */
export type ThrowsExpectation = {
	/** The thrown error must have a code that equals the given string or number. */
	code?: string | number;
	/** The thrown error must be an instance of this constructor. */
	instanceOf?: Constructor;
	/** The thrown error must be strictly equal to this value. */
	is?: Error;
	/** The thrown error must have a message that equals the given string, or matches the regular expression. */
	message?: string | RegExp;
	/** The thrown error must have a name that equals the given string. */
	name?: string;
};

/** Options accepted when committing or discarding a `t.try()` attempt. */
export type CommitDiscardOptions = {
	/**
	 * Whether the logs should be included in those of the parent test.
	 */
	retainLogs?: boolean;
};

/** Options that can be passed to the `t.snapshot()` assertion. */
export type SnapshotOptions = {
	/** If provided and not an empty string, used to select the snapshot to compare the `expected` value against. */
	id?: string;
};
/** The assertion methods available on the execution context (`t`). */
export interface Assertions {
	/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
	assert: AssertAssertion;
	/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
	deepEqual: DeepEqualAssertion;
	/** Assert that `actual` is like `expected`. */
	like: LikeAssertion;
	/** Fail the test. */
	fail: FailAssertion;
	/** Assert that `actual` is strictly false. */
	false: FalseAssertion;
	/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
	falsy: FalsyAssertion;
	/**
	 * Assert that `actual` is [the same
	 * value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
	 */
	is: IsAssertion;
	/**
	 * Assert that `actual` is not [the same
	 * value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
	 */
	not: NotAssertion;
	/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
	notDeepEqual: NotDeepEqualAssertion;
	/** Assert that `string` does not match the regular expression. */
	notRegex: NotRegexAssertion;
	/** Assert that the function does not throw. */
	notThrows: NotThrowsAssertion;
	/** Assert that the async function does not throw, or that the promise does not reject. Must be awaited. */
	notThrowsAsync: NotThrowsAsyncAssertion;
	/** Count a passing assertion. */
	pass: PassAssertion;
	/** Assert that `string` matches the regular expression. */
	regex: RegexAssertion;
	/**
	 * Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
	 * previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
	 * necessary record a new snapshot.
	 */
	snapshot: SnapshotAssertion;
	/**
	 * Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
	 */
	throws: ThrowsAssertion;
	/**
	 * Assert that the async function throws [an error](https://www.npmjs.com/package/is-error), or the promise rejects
	 * with one. If so, returns a promise for the error value, which must be awaited.
	 */
	throwsAsync: ThrowsAsyncAssertion;
	/** Assert that `actual` is strictly true. */
	true: TrueAssertion;
	/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
	truthy: TruthyAssertion;
}
/** The `t.assert()` assertion, including its `.skip()` variant. */
export interface AssertAssertion {
	/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
	(actual: any, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, message?: string): void;
}

/** The `t.deepEqual()` assertion, including its `.skip()` variant. */
export interface DeepEqualAssertion {
	/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
	<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, expected: any, message?: string): void;
}

/** The `t.like()` assertion, including its `.skip()` variant. */
export interface LikeAssertion {
	/** Assert that `value` is like `selector`. */
	(value: any, selector: Record<string, any>, message?: string): void;
	/** Skip this assertion. */
	skip(value: any, selector: any, message?: string): void;
}

/** The `t.fail()` assertion, including its `.skip()` variant. */
export interface FailAssertion {
	/** Fail the test. */
	(message?: string): void;
	/** Skip this assertion. */
	skip(message?: string): void;
}

/** The `t.false()` assertion, including its `.skip()` variant. */
export interface FalseAssertion {
	/** Assert that `actual` is strictly false. */
	(actual: any, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, message?: string): void;
}

/** The `t.falsy()` assertion, including its `.skip()` variant. */
export interface FalsyAssertion {
	/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
	(actual: any, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, message?: string): void;
}

/** The `t.is()` assertion, including its `.skip()` variant. */
export interface IsAssertion {
	/**
	 * Assert that `actual` is [the same
	 * value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
	 */
	<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, expected: any, message?: string): void;
}

/** The `t.not()` assertion, including its `.skip()` variant. */
export interface NotAssertion {
	/**
	 * Assert that `actual` is not [the same
	 * value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
	 */
	<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, expected: any, message?: string): void;
}

/** The `t.notDeepEqual()` assertion, including its `.skip()` variant. */
export interface NotDeepEqualAssertion {
	/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
	<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, expected: any, message?: string): void;
}

/** The `t.notRegex()` assertion, including its `.skip()` variant. */
export interface NotRegexAssertion {
	/** Assert that `string` does not match the regular expression. */
	(string: string, regex: RegExp, message?: string): void;
	/** Skip this assertion. */
	skip(string: string, regex: RegExp, message?: string): void;
}

/** The `t.notThrows()` assertion, including its `.skip()` variant. */
export interface NotThrowsAssertion {
	/** Assert that the function does not throw. */
	(fn: () => any, message?: string): void;
	/** Skip this assertion. */
	skip(fn: () => any, message?: string): void;
}

/** The `t.notThrowsAsync()` assertion, including its `.skip()` variant. */
export interface NotThrowsAsyncAssertion {
	/** Assert that the async function does not throw. You must await the result. */
	(fn: () => PromiseLike<any>, message?: string): Promise<void>;
	/** Assert that the promise does not reject. You must await the result. */
	(promise: PromiseLike<any>, message?: string): Promise<void>;
	/** Skip this assertion. */
	skip(nonThrower: any, message?: string): void;
}

/** The `t.pass()` assertion, including its `.skip()` variant. */
export interface PassAssertion {
	/** Count a passing assertion. */
	(message?: string): void;
	/** Skip this assertion. */
	skip(message?: string): void;
}

/** The `t.regex()` assertion, including its `.skip()` variant. */
export interface RegexAssertion {
	/** Assert that `string` matches the regular expression. */
	(string: string, regex: RegExp, message?: string): void;
	/** Skip this assertion. */
	skip(string: string, regex: RegExp, message?: string): void;
}

/** The `t.snapshot()` assertion, including its `.skip()` variant. */
export interface SnapshotAssertion {
	/**
	 * Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
	 * previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
	 * necessary record a new snapshot.
	 */
	(expected: any, message?: string): void;
	/**
	 * Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
	 * previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details) (selected
	 * through `options.id` if provided), or if necessary record a new snapshot.
	 */
	(expected: any, options: SnapshotOptions, message?: string): void;
	/** Skip this assertion. */
	skip(expected: any, message?: string): void;
	/** Skip this assertion. */
	skip(expected: any, options: SnapshotOptions, message?: string): void;
}

/** The `t.throws()` assertion, including its `.skip()` variant. */
export interface ThrowsAssertion {
	/**
	 * Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
	 * The error must satisfy all expectations.
	 */
	<ThrownError extends Error>(fn: () => any, expectations?: ThrowsExpectation | null, message?: string): ThrownError;
	/** Skip this assertion. */
	skip(fn: () => any, expectations?: any, message?: string): void;
}

/** The `t.throwsAsync()` assertion, including its `.skip()` variant. */
export interface ThrowsAsyncAssertion {
	/**
	 * Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
	 * value. You must await the result.
	 */
	<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
	/**
	 * Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
	 * value. You must await the result. The error must satisfy all expectations.
	 */
	<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
	/**
	 * Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
	 * rejection reason. You must await the result.
	 */
	<ThrownError extends Error>(promise: PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
	/**
	 * Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
	 * rejection reason. You must await the result. The error must satisfy all expectations.
	 */
	<ThrownError extends Error>(promise: PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
	/** Skip this assertion. */
	skip(thrower: any, expectations?: any, message?: string): void;
}

/** The `t.true()` assertion, including its `.skip()` variant. */
export interface TrueAssertion {
	/** Assert that `actual` is strictly true. */
	(actual: any, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, message?: string): void;
}

/** The `t.truthy()` assertion, including its `.skip()` variant. */
export interface TruthyAssertion {
	/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
	(actual: any, message?: string): void;
	/** Skip this assertion. */
	skip(actual: any, message?: string): void;
}
/** The `t` value passed to test & hook implementations. */
export interface ExecutionContext<Context = unknown> extends Assertions {
	/** Test context, shared with hooks. */
	context: Context;
	/** Title of the test or hook. */
	readonly title: string;
	/** Whether the test has passed. Only accurate in afterEach hooks. */
	readonly passed: boolean;
	/** Log one or more values in the test output. See {@link LogFn}. */
	log: LogFn;
	/** Plan how many assertions the test makes. See {@link PlanFn}. */
	plan: PlanFn;
	/** Declare a function to be run after the test has ended. See {@link TeardownFn}. */
	teardown: TeardownFn;
	/** Set a timeout for the test, in milliseconds. See {@link TimeoutFn}. */
	timeout: TimeoutFn;
	/** Attempt assertions that must be explicitly committed or discarded. See {@link TryFn}. */
	try: TryFn<Context>;
}

/** The `t.log()` function. */
export interface LogFn {
	/** Log one or more values. */
	(...values: any[]): void;
	/** Skip logging. */
	skip(...values: any[]): void;
}

/** The `t.plan()` function. */
export interface PlanFn {
	/**
	 * Plan how many assertion there are in the test. The test will fail if the actual assertion count doesn't match the
	 * number of planned assertions. See [assertion planning](https://github.com/avajs/ava#assertion-planning).
	 */
	(count: number): void;
	/** Don't plan assertions. */
	skip(count: number): void;
}

/** The `t.timeout()` function. */
export interface TimeoutFn {
	/**
	 * Set a timeout for the test, in milliseconds. The test will fail if the timeout is exceeded.
	 * The timeout is reset each time an assertion is made.
	 */
	(ms: number, message?: string): void;
}

/** The `t.teardown()` function. */
export interface TeardownFn {
	/** Declare a function to be run after the test has ended. */
	(fn: () => void): void;
}

/** The `t.try()` function. */
export interface TryFn<Context = unknown> {
	/**
	 * Attempt to run some assertions. The result must be explicitly committed or discarded or else
	 * the test will fail. A macro may be provided. The title may help distinguish attempts from
	 * one another.
	 */
	<Args extends any[]>(title: string, fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
	/**
	 * Attempt to run some assertions. The result must be explicitly committed or discarded or else
	 * the test will fail. A macro may be provided. The title may help distinguish attempts from
	 * one another.
	 */
	<Args extends any[]>(title: string, fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
	/**
	 * Attempt to run some assertions. The result must be explicitly committed or discarded or else
	 * the test will fail. A macro may be provided.
	 */
	<Args extends any[]>(fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
	/**
	 * Attempt to run some assertions. The result must be explicitly committed or discarded or else
	 * the test will fail. A macro may be provided.
	 */
	<Args extends any[]>(fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
}
/** The error value produced by a failed assertion. */
export interface AssertionError extends Error {}

/** The outcome of a single `t.try()` attempt. */
export interface TryResult {
	/**
	 * Title of the attempt, helping you tell attempts apart.
	 */
	title: string;
	/**
	 * Indicates whether all assertions passed, or at least one failed.
	 */
	passed: boolean;
	/**
	 * Errors raised for each failed assertion.
	 */
	errors: AssertionError[];
	/**
	 * Logs created during the attempt using `t.log()`. Contains formatted values.
	 */
	logs: string[];
	/**
	 * Commit the attempt. Counts as one assertion for the plan count. If the
	 * attempt failed, calling this will also cause your test to fail.
	 */
	commit(options?: CommitDiscardOptions): void;
	/**
	 * Discard the attempt.
	 */
	discard(options?: CommitDiscardOptions): void;
}

/** The `t` value passed to implementations for tests & hooks declared with the `.cb` modifier. */
export interface CbExecutionContext<Context = unknown> extends ExecutionContext<Context> {
	/**
	 * End the test. If `error` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy) the test or hook
	 * will fail.
	 */
	end(error?: any): void;
}

/** What a test or hook implementation may return: a promise or subscribable to wait on, or nothing. */
export type ImplementationResult = PromiseLike<void> | Subscribable | void;

/** A test or hook implementation. */
export type Implementation<Context = unknown> = (t: ExecutionContext<Context>) => ImplementationResult;

/** A test or hook implementation declared with the `.cb` modifier. */
export type CbImplementation<Context = unknown> = (t: CbExecutionContext<Context>) => ImplementationResult;

/** A reusable test or hook implementation. */
export type UntitledMacro<Args extends any[], Context = unknown> = (t: ExecutionContext<Context>, ...args: Args) => ImplementationResult;

/** A reusable test or hook implementation. */
export type Macro<Args extends any[], Context = unknown> = UntitledMacro<Args, Context> & {
	/**
	 * Implement this function to generate a test (or hook) title whenever this macro is used. `providedTitle` contains
	 * the title provided when the test or hook was declared. Also receives the remaining test arguments.
	 */
	title?: (providedTitle: string | undefined, ...args: Args) => string;
};

/** A macro, with or without a `title` function. */
export type EitherMacro<Args extends any[], Context> = Macro<Args, Context> | UntitledMacro<Args, Context>;

/** Alias for a single macro, or an array of macros. */
export type OneOrMoreMacros<Args extends any[], Context> = EitherMacro<Args, Context> | [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>];

/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
export type UntitledCbMacro<Args extends any[], Context = unknown> = (t: CbExecutionContext<Context>, ...args: Args) => ImplementationResult;

/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
export type CbMacro<Args extends any[], Context = unknown> = UntitledCbMacro<Args, Context> & {
	/** Generates a title when this macro is used; receives the provided title and the macro arguments. */
	title?: (providedTitle: string | undefined, ...args: Args) => string;
};

/** A `.cb` macro, with or without a `title` function. */
export type EitherCbMacro<Args extends any[], Context> = CbMacro<Args, Context> | UntitledCbMacro<Args, Context>;

/** Alias for a single macro, or an array of macros, used for tests & hooks declared with the `.cb` modifier. */
export type OneOrMoreCbMacros<Args extends any[], Context> = EitherCbMacro<Args, Context> | [EitherCbMacro<Args, Context>, ...Array<EitherCbMacro<Args, Context>>];
export interface TestInterface<Context = unknown> {
/** Declare a concurrent test. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests have passed. */
after: AfterInterface<Context>;
/** Declare a hook that is run after each passing test. */
afterEach: AfterInterface<Context>;
/** Declare a hook that is run once, before all tests. */
before: BeforeInterface<Context>;
/** Declare a hook that is run before each test. */
beforeEach: BeforeInterface<Context>;
/** Declare a test that must call `t.end()` when it's done. */
cb: CbInterface<Context>;
/** Declare a test that is expected to fail. */
failing: FailingInterface<Context>;
/** Declare tests and hooks that are run serially. */
serial: SerialInterface<Context>;
only: OnlyInterface<Context>;
skip: SkipInterface<Context>;
todo: TodoDeclaration;
meta: MetaInterface;
}
export interface AfterInterface<Context = unknown> {
/** Declare a hook that is run once, after all tests have passed. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests have passed. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests have passed. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests have passed. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests are done. */
always: AlwaysInterface<Context>;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface AlwaysInterface<Context = unknown> {
/** Declare a hook that is run once, after all tests are done. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests are done. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, after all tests are done. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, after all tests are done. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface BeforeInterface<Context = unknown> {
/** Declare a hook that is run once, before all tests. */
(implementation: Implementation<Context>): void;
/** Declare a hook that is run once, before all tests. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a hook that is run once, before all tests. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that is run once, before all tests. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a hook that must call `t.end()` when it's done. */
cb: HookCbInterface<Context>;
skip: HookSkipInterface<Context>;
}
export interface CbInterface<Context = unknown> {
/** Declare a test that must call `t.end()` when it's done. */
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
* The macro is responsible for generating a unique test title.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Declare a test that is expected to fail. */
failing: CbFailingInterface<Context>;
only: CbOnlyInterface<Context>;
skip: CbSkipInterface<Context>;
}
export interface CbFailingInterface<Context = unknown> {
/** Declare a test that must call `t.end()` when it's done. The test is expected to fail. */
(title: string, implementation: CbImplementation<Context>): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* Additional arguments are passed to the macro. The test is expected to fail.
*/
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
* The test is expected to fail.
*/
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
only: CbOnlyInterface<Context>;
skip: CbSkipInterface<Context>;
}
/**
 * Declares `t.end()`-style tests that run exclusively, along with other tests
 * declared with `.only()`.
 */
export interface CbOnlyInterface<Context = unknown> {
/**
 * Declare a test that must call `t.end()` when it's done. Only this test and others declared with `.only()` are run.
 */
(title: string, implementation: CbImplementation<Context>): void;
/**
 * Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
 * Additional arguments are passed to the macro. Only this test and others declared with `.only()` are run.
 */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
 * Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
 * The macro is responsible for generating a unique test title. Only this test and others declared with `.only()` are run.
 */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
/** Declares `t.end()`-style tests that are skipped (reported but not run). */
export interface CbSkipInterface<Context = unknown> {
/** Skip this test. */
(title: string, implementation: CbImplementation<Context>): void;
/** Skip this test. */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Skip this test. */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
/**
 * Declares concurrent tests that are expected to fail, with `.only`/`.skip`
 * modifier chains.
 */
export interface FailingInterface<Context = unknown> {
/** Declare a concurrent test. The test is expected to fail. */
(title: string, implementation: Implementation<Context>): void;
/**
 * Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro.
 * The test is expected to fail.
 */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
 * Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title.
 * The test is expected to fail.
 */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a test that runs exclusively, along with others declared with `.only()`. */
only: OnlyInterface<Context>;
/** Skip this test. */
skip: SkipInterface<Context>;
}
/**
 * Declares hooks whose implementations must call `t.end()` when they're done,
 * with an optional title and a `.skip` modifier chain.
 */
export interface HookCbInterface<Context = unknown> {
/** Declare a hook that must call `t.end()` when it's done. */
(implementation: CbImplementation<Context>): void;
/** Declare a hook that must call `t.end()` when it's done. */
(title: string, implementation: CbImplementation<Context>): void;
/**
 * Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
 * Additional arguments are passed to the macro.
 */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/**
 * Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
 */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Skip this hook. */
skip: HookCbSkipInterface<Context>;
}
/** Declares skipped `t.end()`-style hooks (accepted but not run). */
export interface HookCbSkipInterface<Context = unknown> {
/** Skip this hook. */
(implementation: CbImplementation<Context>): void;
/** Skip this hook. */
(title: string, implementation: CbImplementation<Context>): void;
/** Skip this hook. */
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
/** Skip this hook. */
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
}
/** Declares skipped hooks (accepted but not run). */
export interface HookSkipInterface<Context = unknown> {
/** Skip this hook. */
(implementation: Implementation<Context>): void;
/** Skip this hook. */
(title: string, implementation: Implementation<Context>): void;
/** Skip this hook. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Skip this hook. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
/**
 * Declares tests that run exclusively, along with other tests declared with
 * `.only()`.
 */
export interface OnlyInterface<Context = unknown> {
/** Declare a test. Only this test and others declared with `.only()` are run. */
(title: string, implementation: Implementation<Context>): void;
/**
 * Declare a test that uses one or more macros. Additional arguments are passed to the macro.
 * Only this test and others declared with `.only()` are run.
 */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
 * Declare a test that uses one or more macros. The macro is responsible for generating a unique test title.
 * Only this test and others declared with `.only()` are run.
 */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
/**
 * Declares serial tests, with chains for serial hooks (`after`, `afterEach`,
 * `before`, `beforeEach`) and test modifiers (`cb`, `failing`, `only`, `skip`,
 * `todo`).
 */
export interface SerialInterface<Context = unknown> {
/** Declare a serial test. */
(title: string, implementation: Implementation<Context>): void;
/** Declare a serial test that uses one or more macros. Additional arguments are passed to the macro. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/**
 * Declare a serial test that uses one or more macros. The macro is responsible for generating a unique test title.
 */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Declare a serial hook that is run once, after all tests have passed. */
after: AfterInterface<Context>;
/** Declare a serial hook that is run after each passing test. */
afterEach: AfterInterface<Context>;
/** Declare a serial hook that is run once, before all tests. */
before: BeforeInterface<Context>;
/** Declare a serial hook that is run before each test. */
beforeEach: BeforeInterface<Context>;
/** Declare a serial test that must call `t.end()` when it's done. */
cb: CbInterface<Context>;
/** Declare a serial test that is expected to fail. */
failing: FailingInterface<Context>;
/** Declare a serial test that runs exclusively, along with others declared with `.only()`. */
only: OnlyInterface<Context>;
/** Skip this test. */
skip: SkipInterface<Context>;
/** Declare a serial test that should be implemented later. */
todo: TodoDeclaration;
}
/** Declares skipped tests (reported but not run). */
export interface SkipInterface<Context = unknown> {
/** Skip this test. */
(title: string, implementation: Implementation<Context>): void;
/** Skip this test. */
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
/** Skip this test. */
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
}
/** Declares tests that should be implemented later; only a title is given. */
export interface TodoDeclaration {
/** Declare a test that should be implemented later. */
(title: string): void;
}
/** Meta data associated with the current process. */
export interface MetaInterface {
/** Path to the test file being executed. */
file: string;
/** Directory where snapshots are stored. */
snapshotDirectory: string;
}
export * from './types/assertions';
export * from './types/try-fn';
export * from './types/test-fn';
export * from './types/subscribable';
/** Call to declare a test, or chain to declare hooks or test modifiers. */
// NOTE(review): `test` was declared twice (`TestInterface` and `TestFn`),
// which is an illegal redeclaration (TS2451). `TestInterface` is kept because
// every other export in this module uses the interfaces declared in this file
// (CbInterface, SerialInterface, etc.) — confirm against the shipped typings.
declare const test: TestInterface;
/** Call to declare a test, or chain to declare hooks or test modifiers */
export default test;
/** Call to declare a hook that is run once, after all tests have passed, or chain to declare modifiers. */
export const after: AfterInterface;
/** Call to declare a hook that is run after each passing test, or chain to declare modifiers. */
export const afterEach: AfterInterface;
/** Call to declare a hook that is run once, before all tests, or chain to declare modifiers. */
export const before: BeforeInterface;
/** Call to declare a hook that is run before each test, or chain to declare modifiers. */
export const beforeEach: BeforeInterface;
/** Call to declare a test that must invoke `t.end()` when it's done, or chain to declare modifiers. */
export const cb: CbInterface;
/** Call to declare a test that is expected to fail, or chain to declare modifiers. */
export const failing: FailingInterface;
/** Call to declare a test that is run exclusively, along with other tests declared with `.only()`. */
export const only: OnlyInterface;
/** Call to declare a serial test, or chain to declare serial hooks or test modifiers. */
export const serial: SerialInterface;
/** Skip this test. */
export const skip: SkipInterface;
/** Declare a test that should be implemented later. */
export const todo: TodoDeclaration;
/** Meta data associated with the current process. */
export const meta: MetaInterface;

8
node_modules/ava/index.js generated vendored
View file

@ -1,8 +0,0 @@
'use strict';
// Ensure the same AVA install is loaded by the test file as by the test worker
if (process.env.AVA_PATH && process.env.AVA_PATH !== __dirname) {
module.exports = require(process.env.AVA_PATH);
} else {
module.exports = require('./lib/worker/main');
}

157
node_modules/ava/lib/api.js generated vendored
View file

@ -1,23 +1,25 @@
'use strict';
const fs = require('fs');
const path = require('path');
const os = require('os');
const commonPathPrefix = require('common-path-prefix');
const resolveCwd = require('resolve-cwd');
const debounce = require('lodash/debounce');
const arrify = require('arrify');
const ms = require('ms');
const chunkd = require('chunkd');
const Emittery = require('emittery');
const pMap = require('p-map');
const tempDir = require('temp-dir');
const globs = require('./globs');
const isCi = require('./is-ci');
const RunStatus = require('./run-status');
const fork = require('./fork');
const serializeError = require('./serialize-error');
const {getApplicableLineNumbers} = require('./line-numbers');
const sharedWorkers = require('./plugin-support/shared-workers');
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import process from 'node:process';
import arrify from 'arrify';
import chunkd from 'chunkd';
import commonPathPrefix from 'common-path-prefix';
import Emittery from 'emittery';
import ms from 'ms';
import pMap from 'p-map';
import resolveCwd from 'resolve-cwd';
import tempDir from 'temp-dir';
import fork from './fork.js';
import * as globs from './globs.js';
import isCi from './is-ci.js';
import {getApplicableLineNumbers} from './line-numbers.js';
import {observeWorkerProcess} from './plugin-support/shared-workers.js';
import RunStatus from './run-status.js';
import scheduler from './scheduler.js';
import serializeError from './serialize-error.js';
function resolveModules(modules) {
return arrify(modules).map(name => {
@ -40,7 +42,40 @@ function getFilePathPrefix(files) {
return commonPathPrefix(files);
}
class Api extends Emittery {
class TimeoutTrigger {
constructor(fn, waitMs = 0) {
this.fn = fn.bind(null);
this.ignoreUntil = 0;
this.waitMs = waitMs;
this.timer = undefined;
}
debounce() {
if (this.timer === undefined) {
this.timer = setTimeout(() => this.trigger(), this.waitMs);
} else {
this.timer.refresh();
}
}
discard() {
// N.B. this.timer is not cleared so if debounce() is called after it will
// not run again.
clearTimeout(this.timer);
}
ignoreFor(periodMs) {
this.ignoreUntil = Math.max(this.ignoreUntil, Date.now() + periodMs);
}
trigger() {
if (Date.now() >= this.ignoreUntil) {
this.fn();
}
}
}
export default class Api extends Emittery {
constructor(options) {
super();
@ -55,7 +90,7 @@ class Api extends Emittery {
}
}
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) {
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) { // eslint-disable-line complexity
let setupOrGlobError;
const apiOptions = this.options;
@ -70,11 +105,11 @@ class Api extends Emittery {
let bailed = false;
const pendingWorkers = new Set();
const timedOutWorkerFiles = new Set();
let restartTimer;
let timeoutTrigger;
if (apiOptions.timeout && !apiOptions.debug) {
const timeout = ms(apiOptions.timeout);
restartTimer = debounce(() => {
timeoutTrigger = new TimeoutTrigger(() => {
// If failFast is active, prevent new test files from running after
// the current ones are exited.
if (failFast) {
@ -89,7 +124,7 @@ class Api extends Emittery {
}
}, timeout);
} else {
restartTimer = Object.assign(() => {}, {cancel() {}});
timeoutTrigger = new TimeoutTrigger(() => {});
}
this._interruptHandler = () => {
@ -102,7 +137,7 @@ class Api extends Emittery {
bailed = true;
// Make sure we don't run the timeout handler
restartTimer.cancel();
timeoutTrigger.discard();
runStatus.emitStateChange({type: 'interrupt'});
@ -111,6 +146,8 @@ class Api extends Emittery {
}
};
const {providers = []} = this.options;
let testFiles;
try {
testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
@ -118,7 +155,8 @@ class Api extends Emittery {
selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
cwd: this.options.projectDir,
filter: filter.map(({pattern}) => pattern),
testFiles
providers,
testFiles,
});
}
} catch (error) {
@ -126,6 +164,13 @@ class Api extends Emittery {
setupOrGlobError = error;
}
const selectionInsights = {
filter,
ignoredFilterPatternFiles: selectedFiles.ignoredFilterPatternFiles || [],
testFileCount: testFiles.length,
selectionCount: selectedFiles.length,
};
try {
if (this.options.parallelRuns) {
const {currentIndex, totalRuns} = this.options.parallelRuns;
@ -137,11 +182,13 @@ class Api extends Emittery {
const currentFileCount = selectedFiles.length;
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns});
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns}, selectionInsights);
} else {
runStatus = new RunStatus(selectedFiles.length, null);
runStatus = new RunStatus(selectedFiles.length, null, selectionInsights);
}
selectedFiles = scheduler.failingTestsFirst(selectedFiles, this._getLocalCacheDir(), this.options.cacheEnabled);
const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
await this.emit('run', {
@ -155,7 +202,7 @@ class Api extends Emittery {
previousFailures: runtimeOptions.previousFailures || 0,
runOnlyExclusive: runtimeOptions.runOnlyExclusive === true,
runVector: runtimeOptions.runVector || 0,
status: runStatus
status: runStatus,
});
if (setupOrGlobError) {
@ -169,9 +216,9 @@ class Api extends Emittery {
runStatus.on('stateChange', record => {
if (record.testFile && !timedOutWorkerFiles.has(record.testFile)) {
// Restart the timer whenever there is activity from workers that
// Debounce the timer whenever there is activity from workers that
// haven't already timed out.
restartTimer();
timeoutTrigger.debounce();
}
if (failFast && (record.type === 'hook-failed' || record.type === 'test-failed' || record.type === 'worker-failed')) {
@ -185,14 +232,16 @@ class Api extends Emittery {
}
});
const {providers = []} = this.options;
const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
const providerStates = [];
await Promise.all(providers.map(async ({type, main}) => {
const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
return state === null ? null : {type, state};
}))).filter(state => state !== null);
if (state !== null) {
providerStates.push({type, state});
}
}));
// Resolve the correct concurrency value.
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Infinity);
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Number.POSITIVE_INFINITY);
if (apiOptions.concurrency > 0) {
concurrency = apiOptions.concurrency;
}
@ -212,13 +261,15 @@ class Api extends Emittery {
}
const lineNumbers = getApplicableLineNumbers(globs.normalizeFileForMatching(apiOptions.projectDir, file), filter);
// Removing `providers` field because they cannot be transfered to the worker threads.
const {providers, ...forkOptions} = apiOptions;
const options = {
...apiOptions,
...forkOptions,
providerStates,
lineNumbers,
recordNewSnapshots: !isCi,
// If we're looking for matches, run every single test process in exclusive-only mode
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true,
};
if (runtimeOptions.updateSnapshots) {
@ -227,42 +278,52 @@ class Api extends Emittery {
}
const worker = fork(file, options, apiOptions.nodeArguments);
worker.onStateChange(data => {
if (data.type === 'test-timeout-configured' && !apiOptions.debug) {
timeoutTrigger.ignoreFor(data.period);
}
});
runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
deregisteredSharedWorkers.push(observeWorkerProcess(worker, runStatus));
pendingWorkers.add(worker);
worker.promise.then(() => {
pendingWorkers.delete(worker);
});
restartTimer();
timeoutTrigger.debounce();
await worker.promise;
}, {concurrency, stopOnError: false});
// Allow shared workers to clean up before the run ends.
await Promise.all(deregisteredSharedWorkers);
scheduler.storeFailedTestFiles(runStatus, this.options.cacheEnabled === false ? null : this._createCacheDir());
} catch (error) {
if (error && error.name === 'AggregateError') {
for (const err of error) {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, err)});
for (const error_ of error.errors) {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error_)});
}
} else {
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error)});
}
}
restartTimer.cancel();
timeoutTrigger.discard();
return runStatus;
}
_getLocalCacheDir() {
return path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
}
_createCacheDir() {
if (this._cacheDir) {
return this._cacheDir;
}
const cacheDir = this.options.cacheEnabled === false ?
fs.mkdtempSync(`${tempDir}${path.sep}`) :
path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
const cacheDir = this.options.cacheEnabled === false
? fs.mkdtempSync(`${tempDir}${path.sep}`)
: this._getLocalCacheDir();
// Ensure cacheDir exists
fs.mkdirSync(cacheDir, {recursive: true});
@ -272,5 +333,3 @@ class Api extends Emittery {
return cacheDir;
}
}
module.exports = Api;

527
node_modules/ava/lib/assert.js generated vendored

File diff suppressed because it is too large Load diff

23
node_modules/ava/lib/chalk.js generated vendored
View file

@ -1,20 +1,15 @@
'use strict';
const chalk = require('chalk');
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
let ctx = null;
exports.get = () => {
if (!ctx) {
throw new Error('Chalk has not yet been configured');
}
let chalk = new Chalk(); // eslint-disable-line import/no-mutable-exports
return ctx;
};
export {chalk};
exports.set = options => {
if (ctx) {
let configured = false;
export function set(options) {
if (configured) {
throw new Error('Chalk has already been configured');
}
ctx = new chalk.Instance(options);
return ctx;
};
configured = true;
chalk = new Chalk(options);
}

230
node_modules/ava/lib/cli.js generated vendored
View file

@ -1,89 +1,112 @@
'use strict';
const path = require('path');
const del = require('del');
const updateNotifier = require('update-notifier');
const figures = require('figures');
const arrify = require('arrify');
const yargs = require('yargs');
const readPkg = require('read-pkg');
const isCi = require('./is-ci');
const {loadConfig} = require('./load-config');
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import arrify from 'arrify';
import ciParallelVars from 'ci-parallel-vars';
import del from 'del';
import figures from 'figures';
import yargs from 'yargs';
import {hideBin} from 'yargs/helpers'; // eslint-disable-line node/file-extension-in-import
import Api from './api.js';
import {chalk} from './chalk.js';
import validateEnvironmentVariables from './environment-variables.js';
import normalizeExtensions from './extensions.js';
import {normalizeGlobs, normalizePattern} from './globs.js';
import {controlFlow} from './ipc-flow-control.cjs';
import isCi from './is-ci.js';
import {splitPatternAndLineNumbers} from './line-numbers.js';
import {loadConfig} from './load-config.js';
import normalizeModuleTypes from './module-types.js';
import normalizeNodeArguments from './node-arguments.js';
import providerManager from './provider-manager.js';
import DefaultReporter from './reporters/default.js';
import TapReporter from './reporters/tap.js';
import Watcher from './watcher.js';
function exit(message) {
console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
console.error(`\n ${chalk.red(figures.cross)} ${message}`);
process.exit(1); // eslint-disable-line unicorn/no-process-exit
}
const coerceLastValue = value => {
return Array.isArray(value) ? value.pop() : value;
};
const coerceLastValue = value => Array.isArray(value) ? value.pop() : value;
const FLAGS = {
concurrency: {
alias: 'c',
coerce: coerceLastValue,
description: 'Max number of test files running at the same time (default: CPU cores)',
type: 'number'
type: 'number',
},
'fail-fast': {
coerce: coerceLastValue,
description: 'Stop after first test failure',
type: 'boolean'
type: 'boolean',
},
match: {
alias: 'm',
description: 'Only run tests with matching title (can be repeated)',
type: 'string'
type: 'string',
},
'no-worker-threads': {
coerce: coerceLastValue,
description: 'Don\'t use worker threads',
type: 'boolean',
},
'node-arguments': {
coerce: coerceLastValue,
description: 'Additional Node.js arguments for launching worker processes (specify as a single string)',
type: 'string'
type: 'string',
},
serial: {
alias: 's',
coerce: coerceLastValue,
description: 'Run tests serially',
type: 'boolean'
type: 'boolean',
},
tap: {
alias: 't',
coerce: coerceLastValue,
description: 'Generate TAP output',
type: 'boolean'
type: 'boolean',
},
timeout: {
alias: 'T',
coerce: coerceLastValue,
description: 'Set global timeout (milliseconds or human-readable, e.g. 10s, 2m)',
type: 'string'
type: 'string',
},
'update-snapshots': {
alias: 'u',
coerce: coerceLastValue,
description: 'Update snapshots',
type: 'boolean'
type: 'boolean',
},
verbose: {
alias: 'v',
coerce: coerceLastValue,
description: 'Enable verbose output',
type: 'boolean'
description: 'Enable verbose output (default)',
type: 'boolean',
},
watch: {
alias: 'w',
coerce: coerceLastValue,
description: 'Re-run tests when files change',
type: 'boolean'
}
type: 'boolean',
},
};
exports.run = async () => { // eslint-disable-line complexity
let conf = {};
let confError = null;
export default async function loadCli() { // eslint-disable-line complexity
let conf;
let confError;
try {
const {argv: {config: configFile}} = yargs.help(false);
const {argv: {config: configFile}} = yargs(hideBin(process.argv)).help(false);
conf = await loadConfig({configFile});
if (conf.configFile && path.basename(conf.configFile) !== path.relative(conf.projectDir, conf.configFile)) {
console.log(chalk.magenta(` ${figures.warning} Using configuration from ${conf.configFile}`));
}
} catch (error) {
confError = error;
}
@ -91,18 +114,24 @@ exports.run = async () => { // eslint-disable-line complexity
// Enter debug mode if the main process is being inspected. This assumes the
// worker processes are automatically inspected, too. It is not necessary to
// run AVA with the debug command, though it's allowed.
const activeInspector = require('inspector').url() !== undefined; // eslint-disable-line node/no-unsupported-features/node-builtins
let debug = activeInspector ?
{
let activeInspector = false;
try {
const {default: inspector} = await import('node:inspector'); // eslint-disable-line node/no-unsupported-features/es-syntax
activeInspector = inspector.url() !== undefined;
} catch {}
let debug = activeInspector
? {
active: true,
break: false,
files: [],
host: undefined,
port: undefined
port: undefined,
} : null;
let resetCache = false;
const {argv} = yargs
const {argv} = yargs(hideBin(process.argv))
.parserConfiguration({
'boolean-negation': true,
'camel-case-expansion': false,
@ -116,7 +145,7 @@ exports.run = async () => { // eslint-disable-line complexity
'set-placeholder-key': false,
'short-option-groups': true,
'strip-aliased': true,
'unknown-options-as-args': false
'unknown-options-as-args': false,
})
.usage('$0 [<pattern>...]')
.usage('$0 debug [<pattern>...]')
@ -124,16 +153,16 @@ exports.run = async () => { // eslint-disable-line complexity
.options({
color: {
description: 'Force color output',
type: 'boolean'
type: 'boolean',
},
config: {
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files'
}
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files',
},
})
.command('* [<pattern>...]', 'Run tests', yargs => yargs.options(FLAGS).positional('pattern', {
array: true,
describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
type: 'string'
describe: 'Select which test files to run. Leave empty if you want AVA to run all test files as per your configuration. Accepts glob patterns, directories that (recursively) contain test files, and file paths. Add a colon and specify line numbers of specific tests to run',
type: 'string',
}), argv => {
if (activeInspector) {
debug.files = argv.pattern || [];
@ -145,22 +174,22 @@ exports.run = async () => { // eslint-disable-line complexity
yargs => yargs.options(FLAGS).options({
break: {
description: 'Break before the test file is loaded',
type: 'boolean'
type: 'boolean',
},
host: {
default: '127.0.0.1',
description: 'Address or hostname through which you can connect to the inspector',
type: 'string'
type: 'string',
},
port: {
default: 9229,
description: 'Port on which you can connect to the inspector',
type: 'number'
}
type: 'number',
},
}).positional('pattern', {
demand: true,
describe: 'Glob patterns to select a single test file to debug. Add a colon and specify line numbers of specific tests to run',
type: 'string'
type: 'string',
}),
argv => {
debug = {
@ -168,12 +197,12 @@ exports.run = async () => { // eslint-disable-line complexity
break: argv.break === true,
files: argv.pattern,
host: argv.host,
port: argv.port
port: argv.port,
};
})
.command(
'reset-cache',
'Reset AVAs compilation cache and exit',
'Delete any temporary files and state kept by AVA, then exit',
yargs => yargs,
() => {
resetCache = true;
@ -184,8 +213,14 @@ exports.run = async () => { // eslint-disable-line complexity
.help();
const combined = {...conf};
for (const flag of Object.keys(FLAGS)) {
if (Reflect.has(argv, flag)) {
if (flag === 'no-worker-threads' && Reflect.has(argv, 'worker-threads')) {
combined.workerThreads = argv['worker-threads'];
continue;
}
if (argv[flag] !== undefined) {
if (flag === 'fail-fast') {
combined.failFast = argv[flag];
} else if (flag === 'update-snapshots') {
@ -196,13 +231,15 @@ exports.run = async () => { // eslint-disable-line complexity
}
}
const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
const chalk = require('./chalk').set(chalkOptions);
if (combined.updateSnapshots && combined.match) {
exit('Snapshots cannot be updated when matching specific tests.');
const chalkOptions = {level: 0};
if (combined.color !== false) {
const {supportsColor: {level}} = await import('chalk'); // eslint-disable-line node/no-unsupported-features/es-syntax, unicorn/import-style
chalkOptions.level = level;
}
const {set: setChalk} = await import('./chalk.js'); // eslint-disable-line node/no-unsupported-features/es-syntax
setChalk(chalkOptions);
if (confError) {
if (confError.parent) {
exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
@ -211,23 +248,23 @@ exports.run = async () => { // eslint-disable-line complexity
}
}
updateNotifier({pkg: require('../package.json')}).notify();
const {nonSemVerExperiments: experiments, projectDir} = conf;
if (resetCache) {
const cacheDir = path.join(projectDir, 'node_modules', '.cache', 'ava');
try {
await del('*', {
cwd: cacheDir,
nodir: true
});
console.error(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
const deletedFilePaths = await del('*', {cwd: cacheDir});
if (deletedFilePaths.length === 0) {
console.log(`\n${chalk.green(figures.tick)} No cache files to remove`);
} else {
console.log(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
}
process.exit(0); // eslint-disable-line unicorn/no-process-exit
} catch (error) {
exit(`Error removing AVA cache files in ${cacheDir}\n\n${chalk.gray((error && error.stack) || error)}`);
}
return;
}
if (argv.watch) {
@ -266,6 +303,10 @@ exports.run = async () => { // eslint-disable-line complexity
console.log(chalk.magenta(` ${figures.warning} Experiments are enabled. These are unsupported and may change or be removed at any time.`));
}
if (Reflect.has(conf, 'babel')) {
exit('Built-in Babel support has been removed.');
}
if (Reflect.has(conf, 'compileEnhancements')) {
exit('Enhancement compilation must be configured in AVAs Babel options.');
}
@ -278,22 +319,9 @@ exports.run = async () => { // eslint-disable-line complexity
exit('sources has been removed. Use ignoredByWatcher to provide glob patterns of files that the watcher should ignore.');
}
const ciParallelVars = require('ci-parallel-vars');
const Api = require('./api');
const DefaultReporter = require('./reporters/default');
const TapReporter = require('./reporters/tap');
const Watcher = require('./watcher');
const normalizeExtensions = require('./extensions');
const normalizeModuleTypes = require('./module-types');
const {normalizeGlobs, normalizePattern} = require('./globs');
const normalizeNodeArguments = require('./node-arguments');
const validateEnvironmentVariables = require('./environment-variables');
const {splitPatternAndLineNumbers} = require('./line-numbers');
const providerManager = require('./provider-manager');
let pkg;
try {
pkg = readPkg.sync({cwd: projectDir});
pkg = JSON.parse(fs.readFileSync(path.resolve(projectDir, 'package.json')));
} catch (error) {
if (error.code !== 'ENOENT') {
throw error;
@ -303,26 +331,13 @@ exports.run = async () => { // eslint-disable-line complexity
const {type: defaultModuleType = 'commonjs'} = pkg || {};
const providers = [];
if (Reflect.has(conf, 'babel')) {
try {
const {level, main} = providerManager.babel(projectDir);
providers.push({
level,
main: main({config: conf.babel}),
type: 'babel'
});
} catch (error) {
exit(error.message);
}
}
if (Reflect.has(conf, 'typescript')) {
try {
const {level, main} = providerManager.typescript(projectDir);
const {level, main} = await providerManager.typescript(projectDir);
providers.push({
level,
main: main({config: conf.typescript}),
type: 'typescript'
type: 'typescript',
});
} catch (error) {
exit(error.message);
@ -377,16 +392,14 @@ exports.run = async () => { // eslint-disable-line complexity
.map(pattern => splitPatternAndLineNumbers(pattern))
.map(({pattern, ...rest}) => ({
pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
...rest
...rest,
}));
if (combined.updateSnapshots && filter.some(condition => condition.lineNumbers !== null)) {
exit('Snapshots cannot be updated when selecting specific tests by their line number.');
}
const api = new Api({
cacheEnabled: combined.cache !== false,
chalkOptions,
concurrency: combined.concurrency || 0,
workerThreads: combined.workerThreads !== false,
debug,
environmentVariables,
experiments,
@ -406,38 +419,31 @@ exports.run = async () => { // eslint-disable-line complexity
snapshotDir: combined.snapshotDir ? path.resolve(projectDir, combined.snapshotDir) : null,
timeout: combined.timeout || '10s',
updateSnapshots: combined.updateSnapshots,
workerArgv: argv['--']
workerArgv: argv['--'],
});
const reporter = combined.tap && !combined.watch && debug === null ? new TapReporter({
extensions: globs.extensions,
projectDir,
reportStream: process.stdout,
stdStream: process.stderr
stdStream: process.stderr,
}) : new DefaultReporter({
extensions: globs.extensions,
projectDir,
reportStream: process.stdout,
stdStream: process.stderr,
watching: combined.watch,
verbose: debug !== null || combined.verbose || isCi || !process.stdout.isTTY
});
api.on('run', plan => {
reporter.startRun(plan);
if (process.env.AVA_EMIT_RUN_STATUS_OVER_IPC === 'I\'ll find a payphone baby / Take some time to talk to you') {
const {controlFlow} = require('./ipc-flow-control');
const bufferedSend = controlFlow(process);
if (process.versions.node >= '12.16.0') {
plan.status.on('stateChange', evt => {
bufferedSend(evt);
});
} else {
const v8 = require('v8');
plan.status.on('stateChange', evt => {
bufferedSend([...v8.serialize(evt)]);
});
}
plan.status.on('stateChange', evt => {
bufferedSend(evt);
});
}
plan.status.on('stateChange', evt => {
@ -455,7 +461,7 @@ exports.run = async () => { // eslint-disable-line complexity
globs,
projectDir,
providers,
reporter
reporter,
});
watcher.observeStdin(process.stdin);
} else {
@ -476,4 +482,4 @@ exports.run = async () => { // eslint-disable-line complexity
process.exitCode = runStatus.suggestExitCode({matching: match.length > 0});
reporter.endRun();
}
};
}

29
node_modules/ava/lib/code-excerpt.js generated vendored
View file

@ -1,14 +1,14 @@
'use strict';
const fs = require('fs');
const equalLength = require('equal-length');
const codeExcerpt = require('code-excerpt');
const truncate = require('cli-truncate');
const chalk = require('./chalk').get();
import fs from 'node:fs';
import truncate from 'cli-truncate';
import codeExcerpt from 'code-excerpt';
import {chalk} from './chalk.js';
const formatLineNumber = (lineNumber, maxLineNumber) =>
' '.repeat(Math.max(0, String(maxLineNumber).length - String(lineNumber).length)) + lineNumber;
module.exports = (source, options = {}) => {
export default function exceptCode(source, options = {}) {
if (!source.isWithinProject || source.isDependency) {
return null;
}
@ -18,7 +18,7 @@ module.exports = (source, options = {}) => {
let contents;
try {
contents = fs.readFileSync(file, 'utf8');
contents = fs.readFileSync(new URL(file), 'utf8');
} catch {
return null;
}
@ -30,25 +30,20 @@ module.exports = (source, options = {}) => {
const lines = excerpt.map(item => ({
line: item.line,
value: truncate(item.value, maxWidth - String(line).length - 5)
value: truncate(item.value, maxWidth - String(line).length - 5),
}));
const joinedLines = lines.map(line => line.value).join('\n');
const extendedLines = equalLength(joinedLines).split('\n');
const extendedWidth = Math.max(...lines.map(item => item.value.length));
return lines
.map((item, index) => ({
line: item.line,
value: extendedLines[index]
}))
.map(item => {
const isErrorSource = item.line === line;
const lineNumber = formatLineNumber(item.line, line) + ':';
const coloredLineNumber = isErrorSource ? lineNumber : chalk.grey(lineNumber);
const result = ` ${coloredLineNumber} ${item.value}`;
const result = ` ${coloredLineNumber} ${item.value.padEnd(extendedWidth)}`;
return isErrorSource ? chalk.bgRed(result) : result;
})
.join('\n');
};
}

View file

@ -1,37 +1,32 @@
'use strict';
const util = require('util'); // eslint-disable-line unicorn/import-style
const ansiStyles = require('ansi-styles');
const stripAnsi = require('strip-ansi');
const cloneDeepWith = require('lodash/cloneDeepWith');
const reactPlugin = require('@concordance/react');
const chalk = require('./chalk').get();
import {inspect} from 'node:util';
// Wrap Concordance's React plugin. Change the name to avoid collisions if in
// the future users can register plugins themselves.
const avaReactPlugin = {...reactPlugin, name: 'ava-plugin-react'};
const plugins = [avaReactPlugin];
import ansiStyles from 'ansi-styles';
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
import stripAnsi from 'strip-ansi';
const forceColor = new chalk.Instance({level: Math.max(chalk.level, 1)});
import {chalk} from './chalk.js';
const forceColor = new Chalk({level: Math.max(chalk.level, 1)});
const colorTheme = {
boolean: ansiStyles.yellow,
circular: forceColor.grey('[Circular]'),
date: {
invalid: forceColor.red('invalid'),
value: ansiStyles.blue
value: ansiStyles.blue,
},
diffGutters: {
actual: forceColor.red('-') + ' ',
expected: forceColor.green('+') + ' ',
padding: ' '
padding: ' ',
},
error: {
ctor: {open: ansiStyles.grey.open + '(', close: ')' + ansiStyles.grey.close},
name: ansiStyles.magenta
name: ansiStyles.magenta,
},
function: {
name: ansiStyles.blue,
stringTag: ansiStyles.magenta
stringTag: ansiStyles.magenta,
},
global: ansiStyles.magenta,
item: {after: forceColor.grey(',')},
@ -45,44 +40,16 @@ const colorTheme = {
closeBracket: forceColor.grey('}'),
ctor: ansiStyles.magenta,
stringTag: {open: ansiStyles.magenta.open + '@', close: ansiStyles.magenta.close},
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close}
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close},
},
property: {
after: forceColor.grey(','),
keyBracket: {open: forceColor.grey('['), close: forceColor.grey(']')},
valueFallback: forceColor.grey('…')
},
react: {
functionType: forceColor.grey('\u235F'),
openTag: {
start: forceColor.grey('<'),
end: forceColor.grey('>'),
selfClose: forceColor.grey('/'),
selfCloseVoid: ' ' + forceColor.grey('/')
},
closeTag: {
open: forceColor.grey('</'),
close: forceColor.grey('>')
},
tagName: ansiStyles.magenta,
attribute: {
separator: '=',
value: {
openBracket: forceColor.grey('{'),
closeBracket: forceColor.grey('}'),
string: {
line: {open: forceColor.blue('"'), close: forceColor.blue('"'), escapeQuote: '"'}
}
}
},
child: {
openBracket: forceColor.grey('{'),
closeBracket: forceColor.grey('}')
}
valueFallback: forceColor.grey('…'),
},
regexp: {
source: {open: ansiStyles.blue.open + '/', close: '/' + ansiStyles.blue.close},
flags: ansiStyles.yellow
flags: ansiStyles.yellow,
},
stats: {separator: forceColor.grey('---')},
string: {
@ -94,45 +61,42 @@ const colorTheme = {
diff: {
insert: {
open: ansiStyles.bgGreen.open + ansiStyles.black.open,
close: ansiStyles.black.close + ansiStyles.bgGreen.close
close: ansiStyles.black.close + ansiStyles.bgGreen.close,
},
delete: {
open: ansiStyles.bgRed.open + ansiStyles.black.open,
close: ansiStyles.black.close + ansiStyles.bgRed.close
close: ansiStyles.black.close + ansiStyles.bgRed.close,
},
equal: ansiStyles.blue,
insertLine: {
open: ansiStyles.green.open,
close: ansiStyles.green.close
close: ansiStyles.green.close,
},
deleteLine: {
open: ansiStyles.red.open,
close: ansiStyles.red.close
}
}
close: ansiStyles.red.close,
},
},
},
symbol: ansiStyles.yellow,
typedArray: {
bytes: ansiStyles.yellow
bytes: ansiStyles.yellow,
},
undefined: ansiStyles.yellow
undefined: ansiStyles.yellow,
};
const plainTheme = cloneDeepWith(colorTheme, value => {
if (typeof value === 'string') {
return stripAnsi(value);
}
});
const plainTheme = JSON.parse(JSON.stringify(colorTheme), value => typeof value === 'string' ? stripAnsi(value) : value);
const theme = chalk.level > 0 ? colorTheme : plainTheme;
exports.default = {
const concordanceOptions = {
// Use Node's object inspection depth, clamped to a minimum of 3
get maxDepth() {
return Math.max(3, util.inspect.defaultOptions.depth);
return Math.max(3, inspect.defaultOptions.depth);
},
plugins,
theme
theme,
};
exports.snapshotManager = {plugins, theme: plainTheme};
export default concordanceOptions;
export const snapshotManager = {theme: plainTheme};

View file

@ -1,7 +1,4 @@
'use strict';
const clone = require('lodash/clone');
class ContextRef {
export default class ContextRef {
constructor() {
this.value = {};
}
@ -18,7 +15,6 @@ class ContextRef {
return new LateBinding(this);
}
}
module.exports = ContextRef;
class LateBinding extends ContextRef {
constructor(ref) {
@ -29,7 +25,8 @@ class LateBinding extends ContextRef {
get() {
if (!this.bound) {
this.set(clone(this.ref.get()));
const value = this.ref.get();
this.set(value !== null && typeof value === 'object' ? {...value} : value);
}
return super.get();

52
node_modules/ava/lib/create-chain.js generated vendored
View file

@ -1,4 +1,3 @@
'use strict';
const chainRegistry = new WeakMap();
function startChain(name, call, defaults) {
@ -48,20 +47,16 @@ function createHookChain(hook, isAfterHook) {
// * `skip` must come at the end
// * no `only`
// * no repeating
extendChain(hook, 'cb', 'callback');
extendChain(hook, 'skip', 'skipped');
extendChain(hook.cb, 'skip', 'skipped');
if (isAfterHook) {
extendChain(hook, 'always');
extendChain(hook.always, 'cb', 'callback');
extendChain(hook.always, 'skip', 'skipped');
extendChain(hook.always.cb, 'skip', 'skipped');
}
return hook;
}
function createChain(fn, defaults, meta) {
export default function createChain(fn, defaults, meta) {
// Test chaining rules:
// * `serial` must come at the start
// * `only` and `skip` must come at the end
@ -69,27 +64,15 @@ function createChain(fn, defaults, meta) {
// * `only` and `skip` cannot be chained together
// * no repeating
const root = startChain('test', fn, {...defaults, type: 'test'});
extendChain(root, 'cb', 'callback');
extendChain(root, 'failing');
extendChain(root, 'only', 'exclusive');
extendChain(root, 'serial');
extendChain(root, 'skip', 'skipped');
extendChain(root.cb, 'failing');
extendChain(root.cb, 'only', 'exclusive');
extendChain(root.cb, 'skip', 'skipped');
extendChain(root.cb.failing, 'only', 'exclusive');
extendChain(root.cb.failing, 'skip', 'skipped');
extendChain(root.failing, 'only', 'exclusive');
extendChain(root.failing, 'skip', 'skipped');
extendChain(root.serial, 'cb', 'callback');
extendChain(root.serial, 'failing');
extendChain(root.serial, 'only', 'exclusive');
extendChain(root.serial, 'skip', 'skipped');
extendChain(root.serial.cb, 'failing');
extendChain(root.serial.cb, 'only', 'exclusive');
extendChain(root.serial.cb, 'skip', 'skipped');
extendChain(root.serial.cb.failing, 'only', 'exclusive');
extendChain(root.serial.cb.failing, 'skip', 'skipped');
extendChain(root.serial.failing, 'only', 'exclusive');
extendChain(root.serial.failing, 'skip', 'skipped');
@ -108,9 +91,38 @@ function createChain(fn, defaults, meta) {
root.todo = startChain('test.todo', fn, {...defaults, type: 'test', todo: true});
root.serial.todo = startChain('test.serial.todo', fn, {...defaults, serial: true, type: 'test', todo: true});
root.macro = options => {
if (typeof options === 'function') {
return Object.freeze({exec: options});
}
return Object.freeze({exec: options.exec, title: options.title});
};
root.meta = meta;
// Our type definition uses ESM syntax; when using CJS with VSCode, the
// auto-completion assumes the root is accessed through `require('ava').default`.
// Placate VSCode by adding a mostly hidden default property on the root.
// This is available through both CJS and ESM imports. We use a proxy so that
// we don't end up with root.default.default.default chains.
Object.defineProperty(root, 'default', {
configurable: false,
enumerable: false,
writable: false,
value: new Proxy(root, {
apply(target, thisArg, argumentsList) {
target.apply(thisArg, argumentsList);
},
get(target, prop) {
if (prop === 'default') {
throw new TypeError('Cannot access default.default');
}
return target[prop];
},
}),
});
return root;
}
module.exports = createChain;

View file

@ -1,5 +1,4 @@
'use strict';
function validateEnvironmentVariables(environmentVariables) {
export default function validateEnvironmentVariables(environmentVariables) {
if (!environmentVariables) {
return {};
}
@ -12,5 +11,3 @@ function validateEnvironmentVariables(environmentVariables) {
return environmentVariables;
}
module.exports = validateEnvironmentVariables;

73
node_modules/ava/lib/eslint-plugin-helper-worker.js generated vendored Normal file
View file

@ -0,0 +1,73 @@
import v8 from 'node:v8';
import {parentPort, workerData} from 'node:worker_threads';
import normalizeExtensions from './extensions.js';
import {normalizeGlobs} from './globs.js';
import {loadConfig} from './load-config.js';
import providerManager from './provider-manager.js';
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
const configCache = new Map();
const collectProviders = async ({conf, projectDir}) => {
const providers = [];
if (Reflect.has(conf, 'typescript')) {
const {level, main} = await providerManager.typescript(projectDir);
providers.push({
level,
main: main({config: conf.typescript}),
type: 'typescript',
});
}
return providers;
};
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
const extensions = overrideExtensions
? normalizeExtensions(overrideExtensions)
: normalizeExtensions(conf.extensions, providers);
return {
cwd: projectDir,
...normalizeGlobs({
extensions,
files: overrideFiles ? overrideFiles : conf.files,
providers,
}),
};
};
const resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
if (!configCache.has(projectDir)) {
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(async conf => {
const providers = await collectProviders({conf, projectDir});
return {conf, providers};
}));
}
const {conf, providers} = await configCache.get(projectDir);
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
};
const data = new Uint8Array(workerData.dataBuffer);
const sync = new Int32Array(workerData.syncBuffer);
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
let encoded;
try {
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
encoded = v8.serialize(globs);
} catch (error) {
encoded = v8.serialize(error);
}
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
Atomics.store(sync, 0, byteLength);
Atomics.notify(sync, 0);
};
parentPort.on('message', handleMessage);
handleMessage(workerData.firstMessage);
delete workerData.firstMessage;

4
node_modules/ava/lib/extensions.js generated vendored
View file

@ -1,4 +1,4 @@
module.exports = (configuredExtensions, providers = []) => {
export default function resolveExtensions(configuredExtensions, providers = []) {
// Combine all extensions possible for testing. Remove duplicate extensions.
const duplicates = new Set();
const seen = new Set();
@ -43,4 +43,4 @@ module.exports = (configuredExtensions, providers = []) => {
}
return [...seen];
};
}

163
node_modules/ava/lib/fork.js generated vendored
View file

@ -1,68 +1,71 @@
'use strict';
const childProcess = require('child_process');
const path = require('path');
const fs = require('fs');
const Emittery = require('emittery');
const {controlFlow} = require('./ipc-flow-control');
import childProcess from 'node:child_process';
import process from 'node:process';
import {fileURLToPath} from 'node:url';
import {Worker} from 'node:worker_threads';
if (fs.realpathSync(__filename) !== __filename) {
console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
import Emittery from 'emittery';
import {pEvent} from 'p-event';
import {controlFlow} from './ipc-flow-control.cjs';
import serializeError from './serialize-error.js';
let workerPath = new URL('worker/base.js', import.meta.url);
export function _testOnlyReplaceWorkerPath(replacement) {
workerPath = replacement;
}
// In case the test file imports a different AVA install,
// the presence of this variable allows it to require this one instead
const AVA_PATH = path.resolve(__dirname, '..');
const WORKER_PATH = require.resolve('./worker/subprocess');
const additionalExecArgv = ['--enable-source-maps'];
class SharedWorkerChannel extends Emittery {
constructor({channelId, filename, initialData}, sendToFork) {
super();
this.id = channelId;
this.filename = filename;
this.initialData = initialData;
this.sendToFork = sendToFork;
}
signalReady() {
this.sendToFork({
type: 'shared-worker-ready',
channelId: this.id
const createWorker = (options, execArgv) => {
let worker;
let postMessage;
let close;
if (options.workerThreads) {
worker = new Worker(workerPath, {
argv: options.workerArgv,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
execArgv: [...execArgv, ...additionalExecArgv],
workerData: {
options,
},
trackUnmanagedFds: true,
stdin: true,
stdout: true,
stderr: true,
});
}
postMessage = worker.postMessage.bind(worker);
signalError() {
this.sendToFork({
type: 'shared-worker-error',
channelId: this.id
// Ensure we've seen this event before we terminate the worker thread, as a
// workaround for https://github.com/nodejs/node/issues/38418.
const starting = pEvent(worker, 'message', ({ava}) => ava && ava.type === 'starting');
close = async () => {
try {
await starting;
await worker.terminate();
} finally {
// No-op
}
};
} else {
worker = childProcess.fork(fileURLToPath(workerPath), options.workerArgv, {
cwd: options.projectDir,
silent: true,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
execArgv: [...execArgv, ...additionalExecArgv],
});
postMessage = controlFlow(worker);
close = async () => worker.kill();
}
emitMessage({messageId, replyTo, serializedData}) {
this.emit('message', {
messageId,
replyTo,
serializedData
});
}
forwardMessageToFork({messageId, replyTo, serializedData}) {
this.sendToFork({
type: 'shared-worker-message',
channelId: this.id,
messageId,
replyTo,
serializedData
});
}
}
let forkCounter = 0;
module.exports = (file, options, execArgv = process.execArgv) => {
const forkId = `fork/${++forkCounter}`;
const sharedWorkerChannels = new Map();
return {
worker,
postMessage,
close,
};
};
export default function loadFork(file, options, execArgv = process.execArgv) {
let finished = false;
const emitter = new Emittery();
@ -75,31 +78,22 @@ module.exports = (file, options, execArgv = process.execArgv) => {
options = {
baseDir: process.cwd(),
file,
forkId,
...options
...options,
};
const subprocess = childProcess.fork(WORKER_PATH, options.workerArgv, {
cwd: options.projectDir,
silent: true,
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
execArgv
});
subprocess.stdout.on('data', chunk => {
const {worker, postMessage, close} = createWorker(options, execArgv);
worker.stdout.on('data', chunk => {
emitStateChange({type: 'worker-stdout', chunk});
});
subprocess.stderr.on('data', chunk => {
worker.stderr.on('data', chunk => {
emitStateChange({type: 'worker-stderr', chunk});
});
const bufferedSend = controlFlow(subprocess);
let forcedExit = false;
const send = evt => {
if (!finished && !forcedExit) {
bufferedSend({ava: evt});
postMessage({ava: evt});
}
};
@ -109,7 +103,7 @@ module.exports = (file, options, execArgv = process.execArgv) => {
resolve();
};
subprocess.on('message', message => {
worker.on('message', message => {
if (!message.ava) {
return;
}
@ -119,15 +113,18 @@ module.exports = (file, options, execArgv = process.execArgv) => {
send({type: 'options', options});
break;
case 'shared-worker-connect': {
const channel = new SharedWorkerChannel(message.ava, send);
sharedWorkerChannels.set(channel.id, channel);
emitter.emit('connectSharedWorker', channel);
const {channelId, filename, initialData, port} = message.ava;
emitter.emit('connectSharedWorker', {
filename,
initialData,
port,
signalError() {
send({type: 'shared-worker-error', channelId});
},
});
break;
}
case 'shared-worker-message':
sharedWorkerChannels.get(message.ava.channelId).emitMessage(message.ava);
break;
case 'ping':
send({type: 'pong'});
break;
@ -136,12 +133,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
}
});
subprocess.on('error', err => {
emitStateChange({type: 'worker-failed', err});
worker.on('error', error => {
emitStateChange({type: 'worker-failed', err: serializeError('Worker error', false, error, file)});
finish();
});
subprocess.on('exit', (code, signal) => {
worker.on('exit', (code, signal) => {
if (forcedExit) {
emitStateChange({type: 'worker-finished', forcedExit});
} else if (code > 0) {
@ -158,12 +155,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
return {
file,
forkId,
threadId: worker.threadId,
promise,
exit() {
forcedExit = true;
subprocess.kill();
close();
},
notifyOfPeerFailure() {
@ -176,6 +173,6 @@ module.exports = (file, options, execArgv = process.execArgv) => {
onStateChange(listener) {
return emitter.on('stateChange', listener);
}
},
};
};
}

140
node_modules/ava/lib/glob-helpers.cjs generated vendored Normal file
View file

@ -0,0 +1,140 @@
'use strict';
const path = require('path');
const process = require('process');
const ignoreByDefault = require('ignore-by-default');
const picomatch = require('picomatch');
const slash = require('slash');
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
exports.defaultIgnorePatterns = defaultIgnorePatterns;
const defaultPicomatchIgnorePatterns = [
...defaultIgnorePatterns,
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
];
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
const matchingCache = new WeakMap();
const processMatchingPatterns = input => {
let result = matchingCache.get(input);
if (!result) {
const ignore = [...defaultPicomatchIgnorePatterns];
const patterns = input.filter(pattern => {
if (pattern.startsWith('!')) {
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
return false;
}
return true;
});
result = {
match: picomatch(patterns, {ignore}),
matchNoIgnore: picomatch(patterns),
individualMatchers: patterns.map(pattern => ({pattern, match: picomatch(pattern, {ignore})})),
};
matchingCache.set(input, result);
}
return result;
};
exports.processMatchingPatterns = processMatchingPatterns;
const matchesIgnorePatterns = (file, patterns) => {
const {matchNoIgnore} = processMatchingPatterns(patterns);
return matchNoIgnore(file) || defaultMatchNoIgnore(file);
};
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
file = normalizeFileForMatching(cwd, file);
return {
isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns),
};
}
exports.classify = classify;
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
exports.hasExtension = hasExtension;
function isHelperish(file) { // Assume file has been normalized already.
// File names starting with an underscore are deemed "helpers".
if (path.basename(file).startsWith('_')) {
return true;
}
// This function assumes the file has been normalized. If it couldn't be,
// don't check if it's got a parent directory that starts with an underscore.
// Deem it not a "helper".
if (path.isAbsolute(file)) {
return false;
}
// If the file has a parent directory that starts with only a single
// underscore, it's deemed a "helper".
return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
}
exports.isHelperish = isHelperish;
function matches(file, patterns) {
const {match} = processMatchingPatterns(patterns);
return match(file);
}
exports.matches = matches;
function normalizeFileForMatching(cwd, file) {
if (process.platform === 'win32') {
cwd = slash(cwd);
file = slash(file);
}
// Note that if `file` is outside `cwd` we can't normalize it. If this turns
// out to be a real-world scenario we may have to make changes in calling code
// to make sure the file isn't even selected for matching.
if (!file.startsWith(cwd)) {
return file;
}
// Assume `cwd` does *not* end in a slash.
return file.slice(cwd.length + 1);
}
exports.normalizeFileForMatching = normalizeFileForMatching;
function normalizePattern(pattern) {
// Always use `/` in patterns, harmonizing matching across platforms
if (process.platform === 'win32') {
pattern = slash(pattern);
}
if (pattern.endsWith('/')) {
pattern = pattern.slice(0, -1);
}
if (pattern.startsWith('./')) {
return pattern.slice(2);
}
if (pattern.startsWith('!./')) {
return `!${pattern.slice(3)}`;
}
return pattern;
}
exports.normalizePattern = normalizePattern;
function normalizePatterns(patterns) {
return patterns.map(pattern => normalizePattern(pattern));
}
exports.normalizePatterns = normalizePatterns;

305
node_modules/ava/lib/globs.js generated vendored
View file

@ -1,54 +1,36 @@
'use strict';
const path = require('path');
const globby = require('globby');
const ignoreByDefault = require('ignore-by-default');
const picomatch = require('picomatch');
const slash = require('slash');
const providerManager = require('./provider-manager');
import fs from 'node:fs';
import path from 'node:path';
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
const defaultPicomatchIgnorePatterns = [
...defaultIgnorePatterns,
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`)
];
import {globby, globbySync} from 'globby';
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
import {
defaultIgnorePatterns,
hasExtension,
normalizeFileForMatching,
normalizePatterns,
processMatchingPatterns,
} from './glob-helpers.cjs';
export {
classify,
isHelperish,
matches,
normalizePattern,
defaultIgnorePatterns,
hasExtension,
normalizeFileForMatching,
normalizePatterns,
} from './glob-helpers.cjs';
const defaultIgnoredByWatcherPatterns = [
'**/*.snap.md', // No need to rerun tests when the Markdown files change.
'ava.config.js', // Config is not reloaded so avoid rerunning tests when it changes.
'ava.config.cjs' // Config is not reloaded so avoid rerunning tests when it changes.
'ava.config.cjs', // Config is not reloaded so avoid rerunning tests when it changes.
];
const buildExtensionPattern = extensions => extensions.length === 1 ? extensions[0] : `{${extensions.join(',')}}`;
function normalizePattern(pattern) {
// Always use `/` in patterns, harmonizing matching across platforms
if (process.platform === 'win32') {
pattern = slash(pattern);
}
if (pattern.startsWith('./')) {
return pattern.slice(2);
}
if (pattern.startsWith('!./')) {
return `!${pattern.slice(3)}`;
}
return pattern;
}
exports.normalizePattern = normalizePattern;
function normalizePatterns(patterns) {
return patterns.map(pattern => normalizePattern(pattern));
}
exports.normalizePatterns = normalizePatterns;
function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
export function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
if (filePatterns !== undefined && (!Array.isArray(filePatterns) || filePatterns.length === 0)) {
throw new Error('The files configuration must be an array containing glob patterns.');
}
@ -68,7 +50,7 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
`**/test/**/*.${extensionPattern}`,
`**/tests/**/*.${extensionPattern}`,
'!**/__tests__/**/__{helper,fixture}?(s)__/**/*',
'!**/test?(s)/**/{helper,fixture}?(s)/**/*'
'!**/test?(s)/**/{helper,fixture}?(s)/**/*',
];
if (filePatterns) {
@ -84,40 +66,36 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
ignoredByWatcherPatterns = ignoredByWatcherPatterns ? [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)] : [...defaultIgnoredByWatcherPatterns];
for (const {level, main} of providers) {
if (level >= providerManager.levels.pathRewrites) {
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
}
for (const {main} of providers) {
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
}
return {extensions, filePatterns, ignoredByWatcherPatterns};
}
exports.normalizeGlobs = normalizeGlobs;
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
exports.hasExtension = hasExtension;
const globOptions = {
// Globs should work relative to the cwd value only (this should be the
// project directory that AVA is run in).
absolute: false,
braceExpansion: true,
caseSensitiveMatch: false,
dot: false,
expandDirectories: false,
extglob: true,
followSymbolicLinks: true,
gitignore: false,
globstar: true,
ignore: defaultIgnorePatterns,
baseNameMatch: false,
stats: false,
unique: true,
};
const globFiles = async (cwd, patterns) => {
const files = await globby(patterns, {
// Globs should work relative to the cwd value only (this should be the
// project directory that AVA is run in).
absolute: false,
braceExpansion: true,
caseSensitiveMatch: false,
...globOptions,
cwd,
dot: false,
expandDirectories: false,
extglob: true,
followSymbolicLinks: true,
gitignore: false,
globstar: true,
ignore: defaultIgnorePatterns,
baseNameMatch: false,
onlyFiles: true,
stats: false,
unique: true
});
// Return absolute file paths. This has the side-effect of normalizing paths
@ -125,119 +103,114 @@ const globFiles = async (cwd, patterns) => {
return files.map(file => path.join(cwd, file));
};
async function findFiles({cwd, extensions, filePatterns}) {
return (await globFiles(cwd, filePatterns)).filter(file => hasExtension(extensions, file));
const globDirectoriesSync = (cwd, patterns) => {
const files = globbySync(patterns, {
...globOptions,
cwd,
onlyDirectories: true,
});
// Return absolute file paths. This has the side-effect of normalizing paths
// on Windows.
return files.map(file => path.join(cwd, file));
};
export async function findFiles({cwd, extensions, filePatterns}) {
const files = await globFiles(cwd, filePatterns);
return files.filter(file => hasExtension(extensions, file));
}
exports.findFiles = findFiles;
async function findTests({cwd, extensions, filePatterns}) {
return (await findFiles({cwd, extensions, filePatterns})).filter(file => !path.basename(file).startsWith('_'));
export async function findTests({cwd, extensions, filePatterns}) {
const files = await findFiles({cwd, extensions, filePatterns});
return files.filter(file => !path.basename(file).startsWith('_'));
}
exports.findTests = findTests;
function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
export function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
return [
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!'))
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!')),
];
}
exports.getChokidarIgnorePatterns = getChokidarIgnorePatterns;
export function applyTestFileFilter({ // eslint-disable-line complexity
cwd,
expandDirectories = true,
filter,
providers = [],
testFiles,
treatFilterPatternsAsFiles = true,
}) {
const {individualMatchers} = processMatchingPatterns(filter);
const normalizedFiles = testFiles.map(file => ({file, matcheable: normalizeFileForMatching(cwd, file)}));
const matchingCache = new WeakMap();
const processMatchingPatterns = input => {
let result = matchingCache.get(input);
if (!result) {
const ignore = [...defaultPicomatchIgnorePatterns];
const patterns = input.filter(pattern => {
if (pattern.startsWith('!')) {
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
return false;
const selected = new Set();
const unmatchedPatterns = new Set(individualMatchers.map(({pattern}) => pattern));
for (const {pattern, match} of individualMatchers) {
for (const {file, matcheable} of normalizedFiles) {
if (match(matcheable)) {
unmatchedPatterns.delete(pattern);
selected.add(file);
}
}
}
if (expandDirectories && unmatchedPatterns.size > 0) {
const expansion = [];
for (const pattern of unmatchedPatterns) {
const directories = globDirectoriesSync(cwd, pattern);
if (directories.length > 0) {
unmatchedPatterns.delete(pattern);
expansion.push(directories);
}
}
const directories = expansion.flat();
if (directories.length > 0) {
for (const file of testFiles) {
if (selected.has(file)) {
continue;
}
for (const dir of directories) {
if (file.startsWith(dir + path.sep)) { // eslint-disable-line max-depth
selected.add(file);
}
}
}
}
}
const ignoredFilterPatternFiles = [];
if (treatFilterPatternsAsFiles && unmatchedPatterns.size > 0) {
const providerExtensions = new Set(providers.flatMap(({main}) => main.extensions));
for (const pattern of unmatchedPatterns) {
const file = path.join(cwd, pattern);
try {
const stats = fs.statSync(file);
if (!stats.isFile()) {
continue;
}
} catch (error) {
if (error.code === 'ENOENT') {
continue;
}
throw error;
}
return true;
});
if (
path.basename(file).startsWith('_')
|| providerExtensions.has(path.extname(file).slice(1))
|| file.split(path.sep).includes('node_modules')
) {
ignoredFilterPatternFiles.push(pattern);
continue;
}
result = {
match: picomatch(patterns, {ignore}),
matchNoIgnore: picomatch(patterns)
};
matchingCache.set(input, result);
selected.add(file);
}
}
return result;
};
function matches(file, patterns) {
const {match} = processMatchingPatterns(patterns);
return match(file);
return Object.assign([...selected], {ignoredFilterPatternFiles});
}
exports.matches = matches;
const matchesIgnorePatterns = (file, patterns) => {
const {matchNoIgnore} = processMatchingPatterns(patterns);
return matchNoIgnore(file) || defaultMatchNoIgnore(file);
};
function normalizeFileForMatching(cwd, file) {
if (process.platform === 'win32') {
cwd = slash(cwd);
file = slash(file);
}
if (!cwd) { // TODO: Ensure tests provide an actual value.
return file;
}
// TODO: If `file` is outside `cwd` we can't normalize it. Need to figure
// out if that's a real-world scenario, but we may have to ensure the file
// isn't even selected.
if (!file.startsWith(cwd)) {
return file;
}
// Assume `cwd` does *not* end in a slash.
return file.slice(cwd.length + 1);
}
exports.normalizeFileForMatching = normalizeFileForMatching;
function isHelperish(file) { // Assume file has been normalized already.
// File names starting with an underscore are deemed "helpers".
if (path.basename(file).startsWith('_')) {
return true;
}
// This function assumes the file has been normalized. If it couldn't be,
// don't check if it's got a parent directory that starts with an underscore.
// Deem it not a "helper".
if (path.isAbsolute(file)) {
return false;
}
// If the file has a parent directory that starts with only a single
// underscore, it's deemed a "helper".
return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
}
exports.isHelperish = isHelperish;
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
file = normalizeFileForMatching(cwd, file);
return {
isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns)
};
}
exports.classify = classify;
function applyTestFileFilter({cwd, filter, testFiles}) {
return testFiles.filter(file => matches(normalizeFileForMatching(cwd, file), filter));
}
exports.applyTestFileFilter = applyTestFileFilter;

View file

@ -1,3 +1,4 @@
'use strict';
function controlFlow(channel) {
let errored = false;
let deliverImmediately = true;

6
node_modules/ava/lib/is-ci.js generated vendored
View file

@ -1,5 +1,7 @@
const info = require('ci-info');
import process from 'node:process';
import info from 'ci-info';
const {AVA_FORCE_CI} = process.env;
module.exports = AVA_FORCE_CI === 'not-ci' ? false : AVA_FORCE_CI === 'ci' || info.isCI;
export default AVA_FORCE_CI === 'not-ci' ? false : AVA_FORCE_CI === 'ci' || info.isCI;

View file

@ -1,17 +1,13 @@
'use strict';
function isLikeSelector(selector) {
return selector !== null &&
typeof selector === 'object' &&
Reflect.getPrototypeOf(selector) === Object.prototype &&
Reflect.ownKeys(selector).length > 0;
export function isLikeSelector(selector) {
return selector !== null
&& typeof selector === 'object'
&& Reflect.getPrototypeOf(selector) === Object.prototype
&& Reflect.ownKeys(selector).length > 0;
}
exports.isLikeSelector = isLikeSelector;
export const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
exports.CIRCULAR_SELECTOR = CIRCULAR_SELECTOR;
function selectComparable(lhs, selector, circular = new Set()) {
export function selectComparable(lhs, selector, circular = new Set()) {
if (circular.has(selector)) {
throw CIRCULAR_SELECTOR;
}
@ -33,5 +29,3 @@ function selectComparable(lhs, selector, circular = new Set()) {
return comparable;
}
exports.selectComparable = selectComparable;

29
node_modules/ava/lib/line-numbers.js generated vendored
View file

@ -1,7 +1,4 @@
'use strict';
const picomatch = require('picomatch');
const flatten = require('lodash/flatten');
import picomatch from 'picomatch';
const NUMBER_REGEX = /^\d+$/;
const RANGE_REGEX = /^(?<startGroup>\d+)-(?<endGroup>\d+)$/;
@ -17,10 +14,10 @@ const sortNumbersAscending = array => {
const parseNumber = string => Number.parseInt(string, 10);
const removeAllWhitespace = string => string.replace(/\s/g, '');
const range = (start, end) => new Array(end - start + 1).fill(start).map((element, index) => element + index);
const range = (start, end) => Array.from({length: end - start + 1}).fill(start).map((element, index) => element + index);
const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(flatten(
suffix.split(',').map(part => {
const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(
suffix.split(',').flatMap(part => {
if (NUMBER_REGEX.test(part)) {
return parseNumber(part);
}
@ -34,10 +31,10 @@ const parseLineNumbers = suffix => sortNumbersAscending(distinctArray(flatten(
}
return range(start, end);
})
)));
}),
));
function splitPatternAndLineNumbers(pattern) {
export function splitPatternAndLineNumbers(pattern) {
const parts = pattern.split(DELIMITER);
if (parts.length === 1) {
return {pattern, lineNumbers: null};
@ -51,14 +48,10 @@ function splitPatternAndLineNumbers(pattern) {
return {pattern: parts.join(DELIMITER), lineNumbers: parseLineNumbers(suffix)};
}
exports.splitPatternAndLineNumbers = splitPatternAndLineNumbers;
function getApplicableLineNumbers(normalizedFilePath, filter) {
return sortNumbersAscending(distinctArray(flatten(
export function getApplicableLineNumbers(normalizedFilePath, filter) {
return sortNumbersAscending(distinctArray(
filter
.filter(({pattern, lineNumbers}) => lineNumbers && picomatch.isMatch(normalizedFilePath, pattern))
.map(({lineNumbers}) => lineNumbers)
)));
.flatMap(({lineNumbers}) => lineNumbers),
));
}
exports.getApplicableLineNumbers = getApplicableLineNumbers;

254
node_modules/ava/lib/load-config.js generated vendored
View file

@ -1,40 +1,17 @@
'use strict';
const fs = require('fs');
const path = require('path');
const url = require('url');
const vm = require('vm');
const {isPlainObject} = require('is-plain-object');
const pkgConf = require('pkg-conf');
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import url from 'node:url';
import {isPlainObject} from 'is-plain-object';
import {packageConfig, packageJsonPath} from 'pkg-conf';
const NO_SUCH_FILE = Symbol('no ava.config.js file');
const MISSING_DEFAULT_EXPORT = Symbol('missing default export');
const EXPERIMENTS = new Set([
'configurableModuleFormat',
'disableNullExpectations',
'disableSnapshotsInHooks',
'nextGenConfig',
'reverseTeardowns',
'sharedWorkers'
]);
// *Very* rudimentary support for loading ava.config.js files containing an `export default` statement.
const evaluateJsConfig = (contents, configFile) => {
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.toString('utf8').replace(/export default/g, '__export__ =')};return __export__;})()`, {
filename: configFile,
lineOffset: -1
});
return script.runInThisContext();
};
const EXPERIMENTS = new Set();
const importConfig = async ({configFile, fileForErrorMessage}) => {
let module;
try {
module = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
} catch (error) {
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
}
const {default: config = MISSING_DEFAULT_EXPORT} = module;
const {default: config = MISSING_DEFAULT_EXPORT} = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
if (config === MISSING_DEFAULT_EXPORT) {
throw new Error(`${fileForErrorMessage} must have a default export`);
}
@ -42,79 +19,22 @@ const importConfig = async ({configFile, fileForErrorMessage}) => {
return config;
};
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}, useImport = false) => {
if (!configFile.endsWith('.js')) {
const loadConfigFile = async ({projectDir, configFile}) => {
if (!fs.existsSync(configFile)) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
let config;
try {
const contents = fs.readFileSync(configFile);
config = useImport && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig') ?
importConfig({configFile, fileForErrorMessage}) :
evaluateJsConfig(contents, configFile) || MISSING_DEFAULT_EXPORT;
return {config: await importConfig({configFile, fileForErrorMessage}), configFile, fileForErrorMessage};
} catch (error) {
if (error.code === 'ENOENT') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
}
if (config === MISSING_DEFAULT_EXPORT) {
throw new Error(`${fileForErrorMessage} must have a default export, using ES module syntax`);
}
return {config, fileForErrorMessage};
};
const loadCjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.cjs')}) => {
if (!configFile.endsWith('.cjs')) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
try {
return {config: require(configFile), fileForErrorMessage};
} catch (error) {
if (error.code === 'MODULE_NOT_FOUND') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
}
};
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}, experimentally = false) => {
if (!configFile.endsWith('.mjs')) {
return null;
}
const fileForErrorMessage = path.relative(projectDir, configFile);
try {
const contents = fs.readFileSync(configFile);
if (experimentally && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig')) {
return {config: importConfig({configFile, fileForErrorMessage}), fileForErrorMessage};
}
} catch (error) {
if (error.code === 'ENOENT') {
return null;
}
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}`), {parent: error});
}
throw new Error(`AVA cannot yet load ${fileForErrorMessage} files`);
};
function resolveConfigFile(projectDir, configFile) {
function resolveConfigFile(configFile) {
if (configFile) {
configFile = path.resolve(configFile); // Relative to CWD
if (path.basename(configFile) !== path.relative(projectDir, configFile)) {
throw new Error('Config files must be located next to the package.json file');
}
if (!configFile.endsWith('.js') && !configFile.endsWith('.cjs') && !configFile.endsWith('.mjs')) {
throw new Error('Config files must have .js, .cjs or .mjs extensions');
@ -124,19 +44,59 @@ function resolveConfigFile(projectDir, configFile) {
return configFile;
}
function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
const filepath = pkgConf.filepath(packageConf);
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
const gitScmFile = process.env.AVA_FAKE_SCM_ROOT || '.git';
configFile = resolveConfigFile(projectDir, configFile);
async function findRepoRoot(fromDir) {
const {root} = path.parse(fromDir);
let dir = fromDir;
while (root !== dir) {
try {
const stat = await fs.promises.stat(path.join(dir, gitScmFile)); // eslint-disable-line no-await-in-loop
if (stat.isFile() || stat.isDirectory()) {
return dir;
}
} catch {}
dir = path.dirname(dir);
}
return root;
}
export async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = await packageConfig('ava', {cwd: resolveFrom});
const filepath = packageJsonPath(packageConf);
const projectDir = filepath === undefined ? resolveFrom : path.dirname(filepath);
const repoRoot = await findRepoRoot(projectDir);
// Conflicts are only allowed when an explicit config file is provided.
const allowConflictWithPackageJson = Boolean(configFile);
configFile = resolveConfigFile(configFile);
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
loadJsConfig({projectDir, configFile}),
loadCjsConfig({projectDir, configFile}),
loadMjsConfig({projectDir, configFile})
].filter(result => result !== null);
let fileConf = NO_SUCH_FILE;
let fileForErrorMessage;
let conflicting = [];
if (configFile) {
const loaded = await loadConfigFile({projectDir, configFile});
if (loaded !== null) {
({config: fileConf, fileForErrorMessage} = loaded);
}
} else {
let searchDir = projectDir;
const stopAt = path.dirname(repoRoot);
do {
const results = await Promise.all([ // eslint-disable-line no-await-in-loop
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.js')}),
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.cjs')}),
loadConfigFile({projectDir, configFile: path.join(searchDir, 'ava.config.mjs')}),
]);
[{config: fileConf, fileForErrorMessage, configFile} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = results.filter(result => result !== null);
searchDir = path.dirname(searchDir);
} while (fileConf === NO_SUCH_FILE && searchDir !== stopAt);
}
if (conflicting.length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
@ -149,19 +109,12 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
}
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
throw new TypeError(`${fileForErrorMessage} must not export a promise`);
}
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
}
if (typeof fileConf === 'function') {
fileConf = fileConf({projectDir});
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must not return a promise`);
}
fileConf = await fileConf({projectDir});
if (!isPlainObject(fileConf)) {
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
@ -173,7 +126,7 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
}
}
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir, configFile};
const {nonSemVerExperiments: experiments} = config;
if (!isPlainObject(experiments)) {
@ -188,80 +141,3 @@ function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}}
return config;
}
exports.loadConfigSync = loadConfigSync;
async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
let packageConf = await pkgConf('ava', {cwd: resolveFrom});
const filepath = pkgConf.filepath(packageConf);
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
configFile = resolveConfigFile(projectDir, configFile);
const allowConflictWithPackageJson = Boolean(configFile);
// TODO: Refactor resolution logic to implement https://github.com/avajs/ava/issues/2285.
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
loadJsConfig({projectDir, configFile}, true),
loadCjsConfig({projectDir, configFile}),
loadMjsConfig({projectDir, configFile}, true)
].filter(result => result !== null);
if (conflicting.length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
}
let sawPromise = false;
if (fileConf !== NO_SUCH_FILE) {
if (allowConflictWithPackageJson) {
packageConf = {};
} else if (Object.keys(packageConf).length > 0) {
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
}
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
sawPromise = true;
fileConf = await fileConf;
}
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
}
if (typeof fileConf === 'function') {
fileConf = fileConf({projectDir});
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
sawPromise = true;
fileConf = await fileConf;
}
if (!isPlainObject(fileConf)) {
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
}
}
if ('ava' in fileConf) {
throw new Error(`Encountered ava property in ${fileForErrorMessage}; avoid wrapping the configuration`);
}
}
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
const {nonSemVerExperiments: experiments} = config;
if (!isPlainObject(experiments)) {
throw new Error(`nonSemVerExperiments from ${fileForErrorMessage} must be an object`);
}
for (const key of Object.keys(experiments)) {
if (!EXPERIMENTS.has(key)) {
throw new Error(`nonSemVerExperiments.${key} from ${fileForErrorMessage} is not a supported experiment`);
}
}
if (sawPromise && experiments.nextGenConfig !== true) {
throw new Error(`${fileForErrorMessage} exported a promise or an asynchronous factory function. You must enable the asyncConfigurationLoading experiment for this to work.`);
}
return config;
}
exports.loadConfig = loadConfig;

10
node_modules/ava/lib/module-types.js generated vendored
View file

@ -54,12 +54,12 @@ const deriveFromArray = (extensions, defaultModuleType) => {
return moduleTypes;
};
module.exports = (configuredExtensions, defaultModuleType, experiments) => {
export default function moduleTypes(configuredExtensions, defaultModuleType) {
if (configuredExtensions === undefined) {
return {
cjs: 'commonjs',
mjs: 'module',
js: defaultModuleType
js: defaultModuleType,
};
}
@ -67,9 +67,5 @@ module.exports = (configuredExtensions, defaultModuleType, experiments) => {
return deriveFromArray(configuredExtensions, defaultModuleType);
}
if (!experiments.configurableModuleFormat) {
throw new Error('You must enable the `configurableModuleFormat` experiment in order to specify module types');
}
return deriveFromObject(configuredExtensions, defaultModuleType);
};
}

View file

@ -1,7 +1,8 @@
'use strict';
const arrgv = require('arrgv');
import process from 'node:process';
function normalizeNodeArguments(fromConf = [], fromArgv = '') {
import arrgv from 'arrgv';
export default function normalizeNodeArguments(fromConf = [], fromArgv = '') {
let parsedArgv = [];
if (fromArgv !== '') {
try {
@ -13,5 +14,3 @@ function normalizeNodeArguments(fromConf = [], fromArgv = '') {
return [...process.execArgv, ...fromConf, ...parsedArgv];
}
module.exports = normalizeNodeArguments;

View file

@ -1,15 +1,26 @@
'use strict';
function parseTestArgs(args) {
const rawTitle = typeof args[0] === 'string' ? args.shift() : undefined;
const receivedImplementationArray = Array.isArray(args[0]);
const implementations = receivedImplementationArray ? args.shift() : args.splice(0, 1);
const buildTitle = (raw, implementation, args) => {
let value = implementation && implementation.title ? implementation.title(raw, ...args) : raw;
const isValid = typeof value === 'string';
if (isValid) {
value = value.trim().replace(/\s+/g, ' ');
}
const buildTitle = implementation => {
const title = implementation.title ? implementation.title(rawTitle, ...args) : rawTitle;
return {title, isSet: typeof title !== 'undefined', isValid: typeof title === 'string', isEmpty: !title};
return {
raw,
value,
isSet: value !== undefined,
isValid,
isEmpty: !isValid || value === '',
};
};
return {args, buildTitle, implementations, rawTitle, receivedImplementationArray};
export default function parseTestArgs(args) {
const rawTitle = typeof args[0] === 'string' ? args.shift() : undefined;
const implementation = args.shift();
return {
args,
implementation: implementation && implementation.exec ? implementation.exec : implementation,
title: buildTitle(rawTitle, implementation, args),
};
}
module.exports = parseTestArgs;

2
node_modules/ava/lib/pkg.cjs generated vendored Normal file
View file

@ -0,0 +1,2 @@
'use strict';
module.exports = require('../package.json');

View file

@ -1,20 +1,30 @@
const {EventEmitter, on} = require('events');
const v8 = require('v8');
const {workerData, parentPort} = require('worker_threads');
const pkg = require('../../package.json');
import {EventEmitter, on} from 'node:events';
import process from 'node:process';
import {workerData, parentPort, threadId} from 'node:worker_threads';
// Used to forward messages received over the `parentPort`. Every subscription
// adds a listener, so do not enforce any maximums.
import pkg from '../pkg.cjs';
// Used to forward messages received over the `parentPort` and any direct ports
// to test workers. Every subscription adds a listener, so do not enforce any
// maximums.
const events = new EventEmitter().setMaxListeners(0);
const emitMessage = message => {
// Wait for a turn of the event loop, to allow new subscriptions to be
// set up in response to the previous message.
setImmediate(() => events.emit('message', message));
};
// Map of active test workers, used in receiveMessages() to get a reference to
// the TestWorker instance, and relevant release functions.
const activeTestWorkers = new Map();
const internalMessagePort = Symbol('Internal MessagePort');
class TestWorker {
constructor(id, file) {
constructor(id, file, port) {
this.id = id;
this.file = file;
this[internalMessagePort] = port;
}
teardown(fn) {
@ -47,10 +57,10 @@ class TestWorker {
}
class ReceivedMessage {
constructor(testWorker, id, serializedData) {
constructor(testWorker, id, data) {
this.testWorker = testWorker;
this.id = id;
this.data = v8.deserialize(new Uint8Array(serializedData));
this.data = data;
}
reply(data) {
@ -98,7 +108,7 @@ async function * receiveMessages(fromTestWorker, replyTo) {
let received = messageCache.get(message);
if (received === undefined) {
received = new ReceivedMessage(active.instance, message.messageId, message.serializedData);
received = new ReceivedMessage(active.instance, message.messageId, message.data);
messageCache.set(message, received);
}
@ -107,59 +117,47 @@ async function * receiveMessages(fromTestWorker, replyTo) {
}
let messageCounter = 0;
const messageIdPrefix = `${workerData.id}/message`;
const messageIdPrefix = `${threadId}/message`;
const nextMessageId = () => `${messageIdPrefix}/${++messageCounter}`;
function publishMessage(testWorker, data, replyTo) {
const id = nextMessageId();
parentPort.postMessage({
testWorker[internalMessagePort].postMessage({
type: 'message',
messageId: id,
testWorkerId: testWorker.id,
serializedData: [...v8.serialize(data)],
replyTo
data,
replyTo,
});
return {
id,
async * replies() {
yield * receiveMessages(testWorker, id);
}
},
};
}
function broadcastMessage(data) {
const id = nextMessageId();
parentPort.postMessage({
type: 'broadcast',
messageId: id,
serializedData: [...v8.serialize(data)]
});
for (const trackedWorker of activeTestWorkers.values()) {
trackedWorker.instance[internalMessagePort].postMessage({
type: 'message',
messageId: id,
data,
});
}
return {
id,
async * replies() {
yield * receiveMessages(undefined, id);
}
},
};
}
async function loadFactory() {
try {
const mod = require(workerData.filename);
if (typeof mod === 'function') {
return mod;
}
return mod.default;
} catch (error) {
if (error && (error.code === 'ERR_REQUIRE_ESM' || (error.code === 'MODULE_NOT_FOUND' && workerData.filename.startsWith('file://')))) {
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
return factory;
}
throw error;
}
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
return factory;
}
let signalAvailable = () => {
@ -175,7 +173,7 @@ loadFactory(workerData.filename).then(factory => {
factory({
negotiateProtocol(supported) {
if (!supported.includes('experimental')) {
if (!supported.includes('ava-4')) {
fatal = new Error(`This version of AVA (${pkg.version}) is not compatible with shared worker plugin at ${workerData.filename}`);
throw fatal;
}
@ -184,12 +182,13 @@ loadFactory(workerData.filename).then(factory => {
parentPort.on('message', async message => {
if (message.type === 'register-test-worker') {
const {id, file} = message;
const instance = new TestWorker(id, file);
const {id, file, port} = message;
const instance = new TestWorker(id, file, port);
activeTestWorkers.set(id, {instance, teardownFns: new Set()});
produceTestWorker(instance);
port.on('message', message => emitMessage({testWorkerId: id, ...message}));
}
if (message.type === 'deregister-test-worker') {
@ -205,18 +204,16 @@ loadFactory(workerData.filename).then(factory => {
parentPort.postMessage({
type: 'deregistered-test-worker',
id
id,
});
}
// Wait for a turn of the event loop, to allow new subscriptions to be
// set up in response to the previous message.
setImmediate(() => events.emit('message', message));
emitMessage(message);
}
});
return {
initialData: workerData.initialData,
protocol: 'experimental',
protocol: 'ava-4',
ready() {
signalAvailable();
@ -235,9 +232,9 @@ loadFactory(workerData.filename).then(factory => {
for await (const [worker] of on(events, 'testWorker')) {
yield worker;
}
}
},
};
}
},
});
}).catch(error => {
if (fatal === undefined) {

View file

@ -1,14 +1,11 @@
const events = require('events');
const serializeError = require('../serialize-error');
import events from 'node:events';
import {pathToFileURL} from 'node:url';
import {Worker} from 'node:worker_threads';
let Worker;
try {
({Worker} = require('worker_threads'));
} catch {}
import serializeError from '../serialize-error.js';
const LOADER = require.resolve('./shared-worker-loader');
const LOADER = new URL('shared-worker-loader.js', import.meta.url);
let sharedWorkerCounter = 0;
const launchedWorkers = new Map();
const waitForAvailable = async worker => {
@ -19,30 +16,28 @@ const waitForAvailable = async worker => {
}
};
function launchWorker({filename, initialData}) {
function launchWorker(filename, initialData) {
if (launchedWorkers.has(filename)) {
return launchedWorkers.get(filename);
}
const id = `shared-worker/${++sharedWorkerCounter}`;
const worker = new Worker(LOADER, {
// Ensure the worker crashes for unhandled rejections, rather than allowing undefined behavior.
execArgv: ['--unhandled-rejections=strict'],
workerData: {
filename,
id,
initialData
}
initialData,
},
});
worker.setMaxListeners(0);
const launched = {
statePromises: {
available: waitForAvailable(worker),
error: events.once(worker, 'error').then(([error]) => error) // eslint-disable-line promise/prefer-await-to-then
error: events.once(worker, 'error').then(([error]) => error),
},
exited: false,
worker
worker,
};
launchedWorkers.set(filename, launched);
@ -53,7 +48,7 @@ function launchWorker({filename, initialData}) {
return launched;
}
async function observeWorkerProcess(fork, runStatus) {
export async function observeWorkerProcess(fork, runStatus) {
let registrationCount = 0;
let signalDeregistered;
const deregistered = new Promise(resolve => {
@ -66,26 +61,11 @@ async function observeWorkerProcess(fork, runStatus) {
}
});
fork.onConnectSharedWorker(async channel => {
const launched = launchWorker(channel);
const handleChannelMessage = ({messageId, replyTo, serializedData}) => {
launched.worker.postMessage({
type: 'message',
testWorkerId: fork.forkId,
messageId,
replyTo,
serializedData
});
};
fork.onConnectSharedWorker(async ({filename, initialData, port, signalError}) => {
const launched = launchWorker(filename, initialData);
const handleWorkerMessage = async message => {
if (message.type === 'broadcast' || (message.type === 'message' && message.testWorkerId === fork.forkId)) {
const {messageId, replyTo, serializedData} = message;
channel.forwardMessageToFork({messageId, replyTo, serializedData});
}
if (message.type === 'deregistered-test-worker' && message.id === fork.forkId) {
if (message.type === 'deregistered-test-worker' && message.id === fork.threadId) {
launched.worker.off('message', handleWorkerMessage);
registrationCount--;
@ -95,35 +75,35 @@ async function observeWorkerProcess(fork, runStatus) {
}
};
launched.statePromises.error.then(error => { // eslint-disable-line promise/prefer-await-to-then
launched.statePromises.error.then(error => {
signalDeregistered();
launched.worker.off('message', handleWorkerMessage);
runStatus.emitStateChange({type: 'shared-worker-error', err: serializeError('Shared worker error', true, error)});
channel.signalError();
signalError();
});
try {
await launched.statePromises.available;
registrationCount++;
port.postMessage({type: 'ready'});
launched.worker.postMessage({
type: 'register-test-worker',
id: fork.forkId,
file: fork.file
});
id: fork.threadId,
file: pathToFileURL(fork.file).toString(),
port,
}, [port]);
fork.promise.finally(() => {
launched.worker.postMessage({
type: 'deregister-test-worker',
id: fork.forkId
id: fork.threadId,
});
channel.off('message', handleChannelMessage);
});
launched.worker.on('message', handleWorkerMessage);
channel.on('message', handleChannelMessage);
channel.signalReady();
} catch {
return;
} finally {
@ -136,5 +116,3 @@ async function observeWorkerProcess(fork, runStatus) {
return deregistered;
}
exports.observeWorkerProcess = observeWorkerProcess;

View file

@ -1,21 +1,21 @@
const pkg = require('../package.json');
const globs = require('./globs');
import * as globs from './globs.js';
import pkg from './pkg.cjs';
const levels = {
ava3: 1,
pathRewrites: 2
// As the protocol changes, comparing levels by integer allows AVA to be
// compatible with different versions. Currently there is only one supported
// version, so this is effectively unused. The infrastructure is retained for
// future use.
levelIntegersAreCurrentlyUnused: 0,
};
exports.levels = levels;
const levelsByProtocol = {
'ava-3': levels.ava3,
'ava-3.2': levels.pathRewrites
'ava-3.2': levels.levelIntegersAreCurrentlyUnused,
};
function load(providerModule, projectDir) {
async function load(providerModule, projectDir) {
const ava = {version: pkg.version};
const makeProvider = require(providerModule);
const {default: makeProvider} = await import(providerModule); // eslint-disable-line node/no-unsupported-features/es-syntax
let fatal;
let level;
@ -37,9 +37,9 @@ function load(providerModule, projectDir) {
},
identifier,
normalizeGlobPatterns: globs.normalizePatterns,
projectDir
projectDir,
};
}
},
});
if (fatal) {
@ -49,5 +49,11 @@ function load(providerModule, projectDir) {
return {...provider, level};
}
exports.babel = projectDir => load('@ava/babel', projectDir);
exports.typescript = projectDir => load('@ava/typescript', projectDir);
const providerManager = {
levels,
async typescript(projectDir) {
return load('@ava/typescript', projectDir);
},
};
export default providerManager;

View file

@ -1,16 +1,10 @@
'use strict';
const StackUtils = require('stack-utils');
import StackUtils from 'stack-utils';
const stackUtils = new StackUtils({
ignoredPackages: [
'@ava/babel',
'@ava/require-precompiled',
'@ava/typescript',
'append-transform',
'ava',
'empower-core',
'esm',
'nyc'
'nyc',
],
internals: [
// AVA internals, which ignoredPackages don't ignore when we run our own unit tests.
@ -20,8 +14,8 @@ const stackUtils = new StackUtils({
/\(internal\/process\/task_queues\.js:\d+:\d+\)$/,
/\(internal\/modules\/cjs\/.+?\.js:\d+:\d+\)$/,
/async Promise\.all \(index/,
/new Promise \(<anonymous>\)/
]
/new Promise \(<anonymous>\)/,
],
});
/*
@ -60,7 +54,7 @@ const stackUtils = new StackUtils({
* Module.runMain (module.js:604:10)
* ```
*/
module.exports = stack => {
export default function beautifyStack(stack) {
if (!stack) {
return [];
}
@ -70,4 +64,4 @@ module.exports = stack => {
.split('\n')
.map(line => line.trim())
.filter(line => line !== '');
};
}

View file

@ -1,17 +1,42 @@
'use strict';
const chalk = require('../chalk').get();
import {chalk} from '../chalk.js';
module.exports = {
log: chalk.gray,
title: chalk.bold,
error: chalk.red,
skip: chalk.yellow,
todo: chalk.blue,
pass: chalk.green,
duration: chalk.gray.dim,
errorSource: chalk.gray,
errorStack: chalk.gray,
errorStackInternal: chalk.gray.dim,
stack: chalk.red,
information: chalk.magenta
const colors = {
get log() {
return chalk.gray;
},
get title() {
return chalk.bold;
},
get error() {
return chalk.red;
},
get skip() {
return chalk.yellow;
},
get todo() {
return chalk.blue;
},
get pass() {
return chalk.green;
},
get duration() {
return chalk.gray.dim;
},
get errorSource() {
return chalk.gray;
},
get errorStack() {
return chalk.gray;
},
get errorStackInternal() {
return chalk.gray.dim;
},
get stack() {
return chalk.red;
},
get information() {
return chalk.magenta;
},
};
export default colors;

View file

@ -1,25 +1,24 @@
'use strict';
const os = require('os');
const path = require('path');
const stream = require('stream');
import os from 'node:os';
import path from 'node:path';
import stream from 'node:stream';
import {fileURLToPath} from 'node:url';
const cliCursor = require('cli-cursor');
const figures = require('figures');
const indentString = require('indent-string');
const ora = require('ora');
const plur = require('plur');
const prettyMs = require('pretty-ms');
const trimOffNewlines = require('trim-off-newlines');
import figures from 'figures';
import indentString from 'indent-string';
import plur from 'plur';
import prettyMs from 'pretty-ms';
import StackUtils from 'stack-utils';
const chalk = require('../chalk').get();
const codeExcerpt = require('../code-excerpt');
const beautifyStack = require('./beautify-stack');
const colors = require('./colors');
const formatSerializedError = require('./format-serialized-error');
const improperUsageMessages = require('./improper-usage-messages');
const prefixTitle = require('./prefix-title');
import {chalk} from '../chalk.js';
import codeExcerpt from '../code-excerpt.js';
const nodeInternals = require('stack-utils').nodeInternals();
import beautifyStack from './beautify-stack.js';
import colors from './colors.js';
import formatSerializedError from './format-serialized-error.js';
import improperUsageMessage from './improper-usage-messages.js';
import prefixTitle from './prefix-title.js';
const nodeInternals = StackUtils.nodeInternals();
class LineWriter extends stream.Writable {
constructor(dest) {
@ -52,129 +51,48 @@ class LineWriter extends stream.Writable {
}
}
class LineWriterWithSpinner extends LineWriter {
constructor(dest, spinner) {
super(dest);
this.lastSpinnerText = '';
this.spinner = spinner;
}
_write(chunk, _, callback) {
this.spinner.clear();
this._writeWithSpinner(chunk.toString('utf8'));
callback();
}
_writev(pieces, callback) {
// Discard the current spinner output. Any lines that were meant to be
// preserved should be rewritten.
this.spinner.clear();
const last = pieces.pop();
for (const piece of pieces) {
this.dest.write(piece.chunk);
}
this._writeWithSpinner(last.chunk.toString('utf8'));
callback();
}
_writeWithSpinner(string) {
if (!this.spinner.isSpinning) {
this.dest.write(string);
return;
}
this.lastSpinnerText = string;
// Ignore whitespace at the end of the chunk. We're continiously rewriting
// the last line through the spinner. Also be careful to remove the indent
// as the spinner adds its own.
this.spinner.text = string.trimEnd().slice(2);
this.spinner.render();
}
}
function manageCorking(stream) {
let corked = false;
const cork = () => {
corked = true;
stream.cork();
};
const uncork = () => {
corked = false;
stream.uncork();
};
return {
decorateFlushingWriter(fn) {
decorateWriter(fn) {
return function (...args) {
if (corked) {
stream.uncork();
}
stream.cork();
try {
return fn.apply(this, args);
} finally {
if (corked) {
stream.cork();
}
stream.uncork();
}
};
},
decorateWriter(fn) {
return function (...args) {
cork();
try {
return fn.apply(this, args);
} finally {
uncork();
}
};
}
};
}
class Reporter {
export default class Reporter {
constructor({
verbose,
extensions,
reportStream,
stdStream,
projectDir,
watching,
spinner,
durationThreshold
durationThreshold,
}) {
this.verbose = verbose;
this.extensions = extensions;
this.reportStream = reportStream;
this.stdStream = stdStream;
this.watching = watching;
this.relativeFile = file => path.relative(projectDir, file);
this.relativeFile = file => {
if (file.startsWith('file://')) {
file = fileURLToPath(file);
}
const {decorateWriter, decorateFlushingWriter} = manageCorking(this.reportStream);
return path.relative(projectDir, file);
};
const {decorateWriter} = manageCorking(this.reportStream);
this.consumeStateChange = decorateWriter(this.consumeStateChange);
this.endRun = decorateWriter(this.endRun);
if (this.verbose) {
this.durationThreshold = durationThreshold || 100;
this.spinner = null;
this.clearSpinner = () => {};
this.lineWriter = new LineWriter(this.reportStream);
} else {
this.spinner = ora({
isEnabled: true,
color: spinner ? spinner.color : 'gray',
discardStdin: !watching,
hideCursor: false,
spinner: spinner || (process.platform === 'win32' ? 'line' : 'dots'),
stream: reportStream
});
this.clearSpinner = decorateFlushingWriter(this.spinner.clear.bind(this.spinner));
this.lineWriter = new LineWriterWithSpinner(this.reportStream, this.spinner);
}
this.durationThreshold = durationThreshold || 100;
this.lineWriter = new LineWriter(this.reportStream);
this.reset();
}
@ -198,7 +116,6 @@ class Reporter {
this.sharedWorkerErrors = [];
this.uncaughtExceptions = [];
this.unhandledRejections = [];
this.unsavedSnapshots = [];
this.previousFailures = 0;
@ -221,9 +138,10 @@ class Reporter {
this.matching = plan.matching;
this.previousFailures = plan.previousFailures;
this.emptyParallelRun = plan.status.emptyParallelRun;
this.selectionInsights = plan.status.selectionInsights;
if (this.watching || plan.files.length > 1) {
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
this.prefixTitle = (testFile, title) => prefixTitle(this.extensions, plan.filePathPrefix, testFile, title);
}
this.removePreviousListener = plan.status.on('stateChange', evt => {
@ -234,13 +152,7 @@ class Reporter {
this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
}
if (this.spinner === null) {
this.lineWriter.writeLine();
} else {
cliCursor.hide(this.reportStream);
this.lineWriter.writeLine();
this.spinner.start();
}
this.lineWriter.writeLine();
}
consumeStateChange(event) { // eslint-disable-line complexity
@ -296,12 +208,10 @@ class Reporter {
this.write(colors.error(`${figures.cross} Internal error`));
}
if (this.verbose) {
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
this.lineWriter.writeLine();
this.lineWriter.writeLine();
break;
}
@ -321,7 +231,7 @@ class Reporter {
}
case 'hook-finished': {
if (this.verbose && event.logs.length > 0) {
if (true && event.logs.length > 0) {
this.lineWriter.writeLine(` ${this.prefixTitle(event.testFile, event.title)}`);
this.writeLogs(event);
}
@ -330,12 +240,10 @@ class Reporter {
}
case 'selected-test': {
if (this.verbose) {
if (event.skip) {
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
} else if (event.todo) {
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
}
if (event.skip) {
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
} else if (event.todo) {
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
}
break;
@ -344,29 +252,21 @@ class Reporter {
case 'shared-worker-error': {
this.sharedWorkerErrors.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
case 'snapshot-error':
this.unsavedSnapshots.push(event);
break;
case 'uncaught-exception': {
this.uncaughtExceptions.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
@ -374,12 +274,10 @@ class Reporter {
case 'unhandled-rejection': {
this.unhandledRejections.push(event);
if (this.verbose) {
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
}
this.lineWriter.ensureEmptyLine();
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
break;
}
@ -389,8 +287,12 @@ class Reporter {
this.filesWithoutDeclaredTests.add(event.testFile);
}
if (this.verbose && !this.filesWithMissingAvaImports.has(event.testFile)) {
if (event.nonZeroExitCode) {
if (!this.filesWithMissingAvaImports.has(event.testFile)) {
if (event.err) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to an error:`));
this.lineWriter.writeLine();
this.writeErr(event);
} else if (event.nonZeroExitCode) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited with a non-zero exit code: ${event.nonZeroExitCode}`));
} else {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to ${event.signal}`));
@ -410,7 +312,7 @@ class Reporter {
this.filesWithoutMatchedLineNumbers.add(event.testFile);
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(event.testFile)} did not match any tests`));
} else if (this.verbose && !this.failFastEnabled && fileStats.remainingTests > 0) {
} else if (true && !this.failFastEnabled && fileStats.remainingTests > 0) {
this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(event.testFile)}`));
}
}
@ -419,9 +321,6 @@ class Reporter {
}
case 'worker-stderr': {
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
this.clearSpinner();
this.stdStream.write(event.chunk);
// If the chunk does not end with a linebreak, *forcibly* write one to
// ensure it remains visible in the TTY.
@ -433,17 +332,10 @@ class Reporter {
this.reportStream.write(os.EOL);
}
if (this.spinner !== null) {
this.lineWriter.write(this.lineWriter.lastSpinnerText);
}
break;
}
case 'worker-stdout': {
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
this.clearSpinner();
this.stdStream.write(event.chunk);
// If the chunk does not end with a linebreak, *forcibly* write one to
// ensure it remains visible in the TTY.
@ -454,10 +346,6 @@ class Reporter {
if (event.chunk[event.chunk.length - 1] !== 0x0A) {
this.reportStream.write(os.EOL);
}
if (this.spinner !== null) {
this.lineWriter.write(this.lineWriter.lastSpinnerText);
}
}
}
}
@ -478,11 +366,7 @@ class Reporter {
}
write(string) {
if (this.verbose) {
this.lineWriter.writeLine(string);
} else {
this.writeWithCounts(string);
}
this.lineWriter.writeLine(string);
}
writeWithCounts(string) {
@ -529,7 +413,7 @@ class Reporter {
writeErr(event) {
if (event.err.name === 'TSError' && event.err.object && event.err.object.diagnosticText) {
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(event.err.object.diagnosticText)));
this.lineWriter.writeLine(colors.errorStack(event.err.object.diagnosticText));
this.lineWriter.writeLine();
return;
}
@ -556,13 +440,13 @@ class Reporter {
this.lineWriter.writeLine();
}
const message = improperUsageMessages.forError(event.err);
const message = improperUsageMessage(event.err);
if (message) {
this.lineWriter.writeLine(message);
this.lineWriter.writeLine();
}
} else if (event.err.nonErrorObject) {
this.lineWriter.writeLine(trimOffNewlines(event.err.formatted));
this.lineWriter.writeLine(event.err.formatted);
this.lineWriter.writeLine();
} else {
this.lineWriter.writeLine(event.err.summary);
@ -618,27 +502,15 @@ class Reporter {
writeTestSummary(event) {
if (event.type === 'hook-failed' || event.type === 'test-failed') {
if (this.verbose) {
this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
} else {
this.write(this.prefixTitle(event.testFile, event.title));
}
this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
} else if (event.knownFailing) {
if (this.verbose) {
this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
} else {
this.write(colors.error(this.prefixTitle(event.testFile, event.title)));
}
} else if (this.verbose) {
this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
} else {
const duration = event.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(event.duration) + ')') : '';
this.write(`${colors.pass(figures.tick)} ${this.prefixTitle(event.testFile, event.title)}${duration}`);
} else {
this.write(this.prefixTitle(event.testFile, event.title));
}
if (this.verbose) {
this.writeLogs(event);
}
this.writeLogs(event);
}
writeFailure(event) {
@ -652,19 +524,38 @@ class Reporter {
endRun() {// eslint-disable-line complexity
let firstLinePostfix = this.watching ? ` ${chalk.gray.dim(`[${new Date().toLocaleTimeString('en-US', {hour12: false})}]`)}` : '';
let wroteSomething = false;
if (!this.verbose) {
this.spinner.stop();
cliCursor.show(this.reportStream);
} else if (this.emptyParallelRun) {
if (this.emptyParallelRun) {
this.lineWriter.writeLine('No files tested in this parallel run');
this.lineWriter.writeLine();
return;
}
if (!this.stats) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldnt find any files to test` + firstLinePostfix));
if (this.selectionInsights.ignoredFilterPatternFiles.length > 0) {
this.write(colors.information(`${figures.warning} Paths for additional test files were disregarded:`));
this.lineWriter.writeLine();
for (const pattern of this.selectionInsights.ignoredFilterPatternFiles) {
this.lineWriter.writeLine(chalk.magenta(`* ${pattern}`));
}
this.lineWriter.writeLine();
this.write(colors.information('Files starting with underscores are never treated as test files.'));
this.write(colors.information('Files handled by @ava/typescript can only be selected if your configuration already selects them.'));
this.lineWriter.writeLine();
}
if (this.selectionInsights.selectionCount === 0) {
if (this.selectionInsights.testFileCount === 0) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldnt find any files to test` + firstLinePostfix));
} else {
const {testFileCount: count} = this.selectionInsights;
this.lineWriter.writeLine(colors.error(`${figures.cross} Based on your configuration, ${count} test ${plur('file was', 'files were', count)} found, but did not match the CLI arguments:` + firstLinePostfix));
this.lineWriter.writeLine();
for (const {pattern} of this.selectionInsights.filter) {
this.lineWriter.writeLine(colors.error(`* ${pattern}`));
}
}
this.lineWriter.writeLine();
return;
}
@ -675,53 +566,8 @@ class Reporter {
return;
}
if (this.verbose) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
} else {
if (this.filesWithMissingAvaImports.size > 0) {
for (const testFile of this.filesWithMissingAvaImports) {
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
if (this.filesWithoutDeclaredTests.size > 0) {
for (const testFile of this.filesWithoutDeclaredTests) {
if (!this.filesWithMissingAvaImports.has(testFile)) {
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
}
if (this.lineNumberErrors.length > 0) {
for (const event of this.lineNumberErrors) {
this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection` + firstLinePostfix));
firstLinePostfix = '';
wroteSomething = true;
}
}
if (this.filesWithoutMatchedLineNumbers.size > 0) {
for (const testFile of this.filesWithoutMatchedLineNumbers) {
if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
firstLinePostfix = '';
wroteSomething = true;
}
}
}
if (wroteSomething) {
this.lineWriter.writeLine();
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
wroteSomething = false;
}
}
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
if (this.failures.length > 0) {
const writeTrailingLines = this.internalErrors.length > 0 || this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
@ -732,106 +578,13 @@ class Reporter {
if (event !== lastFailure) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
} else if (!this.verbose && writeTrailingLines) {
} else if (!true && writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
if (this.verbose) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
}
if (!this.verbose) {
if (this.internalErrors.length > 0) {
const writeTrailingLines = this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
const last = this.internalErrors[this.internalErrors.length - 1];
for (const event of this.internalErrors) {
if (event.testFile) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
} else {
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
}
this.lineWriter.writeLine(colors.stack(event.err.summary));
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
if (event !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.sharedWorkerErrors.length > 0) {
const writeTrailingLines = this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
const last = this.sharedWorkerErrors[this.sharedWorkerErrors.length - 1];
for (const evt of this.sharedWorkerErrors) {
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
this.lineWriter.writeLine();
this.writeErr(evt.err);
if (evt !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.uncaughtExceptions.length > 0) {
const writeTrailingLines = this.unhandledRejections.length > 0;
const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
for (const event of this.uncaughtExceptions) {
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
if (event !== last || writeTrailingLines) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (this.unhandledRejections.length > 0) {
const last = this.unhandledRejections[this.unhandledRejections.length - 1];
for (const event of this.unhandledRejections) {
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
this.lineWriter.writeLine();
this.writeErr(event);
if (event !== last) {
this.lineWriter.writeLine();
this.lineWriter.writeLine();
}
wroteSomething = true;
}
}
if (wroteSomething) {
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
}
if (this.unsavedSnapshots.length > 0) {
this.lineWriter.writeLine(colors.title('Could not update snapshots for the following test files:'));
this.lineWriter.writeLine();
for (const event of this.unsavedSnapshots) {
this.lineWriter.writeLine(`${figures.warning} ${this.relativeFile(event.testFile)}`);
}
this.lineWriter.writeLine(colors.log(figures.line));
this.lineWriter.writeLine();
}
@ -853,16 +606,14 @@ class Reporter {
}
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
if (this.verbose) {
this.lineWriter.writeLine();
}
this.lineWriter.writeLine();
}
if (this.verbose && this.stats.parallelRuns) {
if (this.stats.parallelRuns) {
const {
currentFileCount,
currentIndex,
totalRuns
totalRuns,
} = this.stats.parallelRuns;
this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
this.lineWriter.writeLine();
@ -879,11 +630,11 @@ class Reporter {
}
if (
this.stats.failedHooks === 0 &&
this.stats.failedTests === 0 &&
this.stats.passedTests > 0
this.stats.failedHooks === 0
&& this.stats.failedTests === 0
&& this.stats.passedTests > 0
) {
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix,
);
firstLinePostfix = '';
}
@ -917,4 +668,3 @@ class Reporter {
}
}
}
module.exports = Reporter;

View file

@ -1,27 +1,16 @@
'use strict';
const trimOffNewlines = require('trim-off-newlines');
const chalk = require('../chalk').get();
export default function formatSerializedError(error) {
const printMessage = error.values.length === 0
? Boolean(error.message)
: !error.values[0].label.startsWith(error.message);
function formatSerializedError(error) {
const printMessage = error.values.length === 0 ?
Boolean(error.message) :
!error.values[0].label.startsWith(error.message);
if (error.statements.length === 0 && error.values.length === 0) {
if (error.values.length === 0) {
return {formatted: null, printMessage};
}
let formatted = '';
for (const value of error.values) {
formatted += `${value.label}\n\n${trimOffNewlines(value.formatted)}\n\n`;
formatted += `${value.label}\n\n${value.formatted}\n\n`;
}
for (const statement of error.statements) {
formatted += `${statement[0]}\n${chalk.grey('=>')} ${trimOffNewlines(statement[1])}\n\n`;
}
formatted = trimOffNewlines(formatted);
return {formatted, printMessage};
return {formatted: formatted.trim(), printMessage};
}
module.exports = formatSerializedError;

View file

@ -1,8 +1,7 @@
'use strict';
const chalk = require('../chalk').get();
const pkg = require('../../package.json');
import {chalk} from '../chalk.js';
import pkg from '../pkg.cjs';
exports.forError = error => {
export default function buildMessage(error) {
if (!error.improperUsage) {
return null;
}
@ -21,7 +20,7 @@ Visit the following URL for more details:
if (assertion === 'snapshot') {
const {name, snapPath} = error.improperUsage;
if (name === 'ChecksumError') {
if (name === 'ChecksumError' || name === 'InvalidSnapshotError') {
return `The snapshot file is corrupted.
File path: ${chalk.yellow(snapPath)}
@ -39,9 +38,9 @@ Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrad
if (name === 'VersionMismatchError') {
const {snapVersion, expectedVersion} = error.improperUsage;
const upgradeMessage = snapVersion < expectedVersion ?
`Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.` :
'You should upgrade AVA.';
const upgradeMessage = snapVersion < expectedVersion
? `Please run AVA again with the ${chalk.cyan('--update-snapshots')} flag to upgrade.`
: 'You should upgrade AVA.';
return `The snapshot file is v${snapVersion}, but only v${expectedVersion} is supported.
@ -52,4 +51,4 @@ ${upgradeMessage}`;
}
return null;
};
}

View file

@ -1,21 +1,23 @@
'use strict';
const path = require('path');
const figures = require('figures');
const chalk = require('../chalk').get();
import path from 'node:path';
const SEPERATOR = ' ' + chalk.gray.dim(figures.pointerSmall) + ' ';
import figures from 'figures';
module.exports = (base, file, title) => {
const prefix = file
import {chalk} from '../chalk.js';
const SEPARATOR = ' ' + chalk.gray.dim(figures.pointerSmall) + ' ';
export default function prefixTitle(extensions, base, file, title) {
const parts = file
// Only replace base if it is found at the start of the path
.replace(base, (match, offset) => offset === 0 ? '' : match)
.replace(/\.spec/, '')
.replace(/\.test/, '')
.replace(/test-/g, '')
.replace(/\.js$/, '')
.split(path.sep)
.filter(p => p !== '__tests__')
.join(SEPERATOR);
.filter(p => p !== '__tests__');
return prefix + SEPERATOR + title;
};
const filename = parts.pop()
.replace(/\.spec\./, '.')
.replace(/\.test\./, '.')
.replace(/test-/, '')
.replace(new RegExp(`.(${extensions.join('|')})$`), '');
return [...parts, filename, title].join(SEPARATOR);
}

View file

@ -1,14 +1,13 @@
'use strict';
const os = require('os');
const path = require('path');
import os from 'node:os';
import path from 'node:path';
const plur = require('plur');
const stripAnsi = require('strip-ansi');
const supertap = require('supertap');
const indentString = require('indent-string');
import indentString from 'indent-string';
import plur from 'plur';
import stripAnsi from 'strip-ansi';
import supertap from 'supertap';
const beautifyStack = require('./beautify-stack');
const prefixTitle = require('./prefix-title');
import beautifyStack from './beautify-stack.js';
import prefixTitle from './prefix-title.js';
function dumpError(error) {
const object = {...error.object};
@ -30,10 +29,7 @@ function dumpError(error) {
}
if (error.values.length > 0) {
object.values = error.values.reduce((acc, value) => { // eslint-disable-line unicorn/no-reduce
acc[value.label] = stripAnsi(value.formatted);
return acc;
}, {});
object.values = Object.fromEntries(error.values.map(({label, formatted}) => [label, stripAnsi(formatted)]));
}
}
@ -49,10 +45,11 @@ function dumpError(error) {
return object;
}
class TapReporter {
export default class TapReporter {
constructor(options) {
this.i = 0;
this.extensions = options.extensions;
this.stdStream = options.stdStream;
this.reportStream = options.reportStream;
@ -65,7 +62,7 @@ class TapReporter {
startRun(plan) {
if (plan.files.length > 1) {
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
this.prefixTitle = (testFile, title) => prefixTitle(this.extensions, plan.filePathPrefix, testFile, title);
}
plan.status.on('stateChange', evt => this.consumeStateChange(evt));
@ -80,7 +77,7 @@ class TapReporter {
failed: this.stats.failedTests + this.stats.remainingTests,
passed: this.stats.passedTests + this.stats.passedKnownFailingTests,
skipped: this.stats.skippedTests,
todo: this.stats.todoTests
todo: this.stats.todoTests,
}) + os.EOL);
if (this.stats.parallelRuns) {
@ -93,7 +90,7 @@ class TapReporter {
failed: 0,
passed: 0,
skipped: 0,
todo: 0
todo: 0,
}) + os.EOL);
}
}
@ -105,7 +102,7 @@ class TapReporter {
index: ++this.i,
passed: flags.passed,
skip: flags.skip,
todo: flags.todo
todo: flags.todo,
}) + os.EOL);
}
@ -117,7 +114,7 @@ class TapReporter {
index: ++this.i,
passed: false,
skip: false,
todo: false
todo: false,
}) + os.EOL);
}
@ -132,11 +129,11 @@ class TapReporter {
}
writeTimeout(evt) {
const err = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
const error = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
for (const [testFile, tests] of evt.pendingTests) {
for (const title of tests) {
this.writeTest({testFile, title, err}, {passed: false, todo: false, skip: false});
this.writeTest({testFile, title, err: error}, {passed: false, todo: false, skip: false});
}
}
}
@ -168,9 +165,6 @@ class TapReporter {
this.writeTest(evt, {passed: false, todo: true, skip: false});
}
break;
case 'snapshot-error':
this.writeComment(evt, {title: 'Could not update snapshots'});
break;
case 'stats':
this.stats = evt.stats;
@ -219,4 +213,3 @@ class TapReporter {
}
}
}
module.exports = TapReporter;

52
node_modules/ava/lib/run-status.js generated vendored
View file

@ -1,17 +1,21 @@
'use strict';
const Emittery = require('emittery');
const cloneDeep = require('lodash/cloneDeep');
import v8 from 'node:v8';
class RunStatus extends Emittery {
constructor(files, parallelRuns) {
import Emittery from 'emittery';
const copyStats = stats => v8.deserialize(v8.serialize(stats));
export default class RunStatus extends Emittery {
constructor(files, parallelRuns, selectionInsights) {
super();
this.pendingTests = new Map();
this.emptyParallelRun = parallelRuns &&
parallelRuns.currentFileCount === 0 &&
parallelRuns.totalRuns > 1 &&
files > 0;
this.emptyParallelRun = parallelRuns
&& parallelRuns.currentFileCount === 0
&& parallelRuns.totalRuns > 1
&& files > 0;
this.selectionInsights = selectionInsights;
this.stats = {
byFile: new Map(),
@ -32,7 +36,7 @@ class RunStatus extends Emittery {
timeouts: 0,
todoTests: 0,
uncaughtExceptions: 0,
unhandledRejections: 0
unhandledRejections: 0,
};
}
@ -51,7 +55,7 @@ class RunStatus extends Emittery {
todoTests: 0,
uncaughtExceptions: 0,
unhandledRejections: 0,
...stats
...stats,
});
this.pendingTests.set(testFile, new Set());
@ -147,7 +151,7 @@ class RunStatus extends Emittery {
}
if (changedStats) {
this.emit('stateChange', {type: 'stats', stats: cloneDeep(stats)});
this.emit('stateChange', {type: 'stats', stats: copyStats(stats)});
}
this.emit('stateChange', event);
@ -163,15 +167,15 @@ class RunStatus extends Emittery {
}
if (
this.stats.declaredTests === 0 ||
this.stats.internalErrors > 0 ||
this.stats.failedHooks > 0 ||
this.stats.failedTests > 0 ||
this.stats.failedWorkers > 0 ||
this.stats.sharedWorkerErrors > 0 ||
this.stats.timeouts > 0 ||
this.stats.uncaughtExceptions > 0 ||
this.stats.unhandledRejections > 0
this.stats.declaredTests === 0
|| this.stats.internalErrors > 0
|| this.stats.failedHooks > 0
|| this.stats.failedTests > 0
|| this.stats.failedWorkers > 0
|| this.stats.sharedWorkerErrors > 0
|| this.stats.timeouts > 0
|| this.stats.uncaughtExceptions > 0
|| this.stats.unhandledRejections > 0
) {
return 1;
}
@ -194,6 +198,8 @@ class RunStatus extends Emittery {
this.pendingTests.get(event.testFile).delete(event.title);
}
}
}
module.exports = RunStatus;
getFailedTestFiles() {
return [...this.stats.byFile].filter(statByFile => statByFile[1].failedTests).map(statByFile => statByFile[0]);
}
}

337
node_modules/ava/lib/runner.js generated vendored
View file

@ -1,14 +1,19 @@
'use strict';
const Emittery = require('emittery');
const matcher = require('matcher');
const ContextRef = require('./context-ref');
const createChain = require('./create-chain');
const parseTestArgs = require('./parse-test-args');
const snapshotManager = require('./snapshot-manager');
const serializeError = require('./serialize-error');
const Runnable = require('./test');
import process from 'node:process';
import {pathToFileURL} from 'node:url';
class Runner extends Emittery {
import Emittery from 'emittery';
import {matcher} from 'matcher';
import ContextRef from './context-ref.js';
import createChain from './create-chain.js';
import parseTestArgs from './parse-test-args.js';
import serializeError from './serialize-error.js';
import {load as loadSnapshots, determineSnapshotDir} from './snapshot-manager.js';
import Runnable from './test.js';
import {waitForReady} from './worker/state.cjs';
const makeFileURL = file => file.startsWith('file://') ? file : pathToFileURL(file).toString();
export default class Runner extends Emittery {
constructor(options = {}) {
super();
@ -18,21 +23,18 @@ class Runner extends Emittery {
this.file = options.file;
this.checkSelectedByLineNumbers = options.checkSelectedByLineNumbers;
this.match = options.match || [];
this.powerAssert = undefined; // Assigned later.
this.projectDir = options.projectDir;
this.recordNewSnapshots = options.recordNewSnapshots === true;
this.runOnlyExclusive = options.runOnlyExclusive === true;
this.serial = options.serial === true;
this.skippingTests = false;
this.snapshotDir = options.snapshotDir;
this.updateSnapshots = options.updateSnapshots;
this.activeRunnables = new Set();
this.boundCompareTestSnapshot = this.compareTestSnapshot.bind(this);
this.skippedSnapshots = false;
this.boundSkipSnapshot = this.skipSnapshot.bind(this);
this.interrupted = false;
this.snapshots = null;
this.nextTaskIndex = 0;
this.tasks = {
after: [],
@ -43,9 +45,9 @@ class Runner extends Emittery {
beforeEach: [],
concurrent: [],
serial: [],
todo: []
todo: [],
};
this.waitForReady = [];
this.waitForReady = waitForReady;
const uniqueTestTitles = new Set();
this.registerUniqueTitle = title => {
@ -57,14 +59,21 @@ class Runner extends Emittery {
return true;
};
this.notifyTimeoutUpdate = timeoutMs => {
this.emit('stateChange', {
type: 'test-timeout-configured',
period: timeoutMs,
});
};
let hasStarted = false;
let scheduledStart = false;
const meta = Object.freeze({
file: options.file,
file: makeFileURL(options.file),
get snapshotDirectory() {
const {file, snapshotDir: fixedLocation, projectDir} = options;
return snapshotManager.determineSnapshotDir({file, fixedLocation, projectDir});
}
return makeFileURL(determineSnapshotDir({file, fixedLocation, projectDir}));
},
});
this.chain = createChain((metadata, testArgs) => { // eslint-disable-line complexity
if (hasStarted) {
@ -81,98 +90,96 @@ class Runner extends Emittery {
metadata.taskIndex = this.nextTaskIndex++;
const {args, buildTitle, implementations, rawTitle} = parseTestArgs(testArgs);
const {args, implementation, title} = parseTestArgs(testArgs);
if (this.checkSelectedByLineNumbers) {
metadata.selected = this.checkSelectedByLineNumbers();
}
if (metadata.todo) {
if (implementations.length > 0) {
if (implementation) {
throw new TypeError('`todo` tests are not allowed to have an implementation. Use `test.skip()` for tests with an implementation.');
}
if (!rawTitle) { // Either undefined or a string.
if (!title.raw) { // Either undefined or a string.
throw new TypeError('`todo` tests require a title');
}
if (!this.registerUniqueTitle(rawTitle)) {
throw new Error(`Duplicate test title: ${rawTitle}`);
if (!this.registerUniqueTitle(title.value)) {
throw new Error(`Duplicate test title: ${title.value}`);
}
if (this.match.length > 0) {
// --match selects TODO tests.
if (matcher([rawTitle], this.match).length === 1) {
metadata.exclusive = true;
this.runOnlyExclusive = true;
}
// --match selects TODO tests.
if (this.match.length > 0 && matcher(title.value, this.match).length === 1) {
metadata.exclusive = true;
this.runOnlyExclusive = true;
}
this.tasks.todo.push({title: rawTitle, metadata});
this.tasks.todo.push({title: title.value, metadata});
this.emit('stateChange', {
type: 'declared-test',
title: rawTitle,
title: title.value,
knownFailing: false,
todo: true
todo: true,
});
} else {
if (implementations.length === 0) {
if (!implementation) {
throw new TypeError('Expected an implementation. Use `test.todo()` for tests without an implementation.');
}
for (const implementation of implementations) {
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
if (Array.isArray(implementation)) {
throw new TypeError('AVA 4 no longer supports multiple implementations.');
}
if (isSet && !isValid) {
throw new TypeError('Test & hook titles must be strings');
}
if (isEmpty) {
if (metadata.type === 'test') {
throw new TypeError('Tests must have a title');
} else if (metadata.always) {
title = `${metadata.type}.always hook`;
} else {
title = `${metadata.type} hook`;
}
}
if (metadata.type === 'test' && !this.registerUniqueTitle(title)) {
throw new Error(`Duplicate test title: ${title}`);
}
const task = {
title,
implementation,
args,
metadata: {...metadata}
};
if (title.isSet && !title.isValid) {
throw new TypeError('Test & hook titles must be strings');
}
let fallbackTitle = title.value;
if (title.isEmpty) {
if (metadata.type === 'test') {
if (this.match.length > 0) {
// --match overrides .only()
task.metadata.exclusive = matcher([title], this.match).length === 1;
}
if (task.metadata.skipped) {
this.skippingTests = true;
}
if (task.metadata.exclusive) {
this.runOnlyExclusive = true;
}
this.tasks[metadata.serial ? 'serial' : 'concurrent'].push(task);
this.emit('stateChange', {
type: 'declared-test',
title,
knownFailing: metadata.failing,
todo: false
});
} else if (!metadata.skipped) {
this.tasks[metadata.type + (metadata.always ? 'Always' : '')].push(task);
throw new TypeError('Tests must have a title');
} else if (metadata.always) {
fallbackTitle = `${metadata.type}.always hook`;
} else {
fallbackTitle = `${metadata.type} hook`;
}
}
if (metadata.type === 'test' && !this.registerUniqueTitle(title.value)) {
throw new Error(`Duplicate test title: ${title.value}`);
}
const task = {
title: title.value || fallbackTitle,
implementation,
args,
metadata: {...metadata},
};
if (metadata.type === 'test') {
if (this.match.length > 0) {
// --match overrides .only()
task.metadata.exclusive = matcher(title.value, this.match).length === 1;
}
if (task.metadata.exclusive) {
this.runOnlyExclusive = true;
}
this.tasks[metadata.serial ? 'serial' : 'concurrent'].push(task);
this.snapshots.touch(title.value, metadata.taskIndex);
this.emit('stateChange', {
type: 'declared-test',
title: title.value,
knownFailing: metadata.failing,
todo: false,
});
} else if (!metadata.skipped) {
this.tasks[metadata.type + (metadata.always ? 'Always' : '')].push(task);
}
}
}, {
serial: false,
@ -182,54 +189,43 @@ class Runner extends Emittery {
failing: false,
callback: false,
inline: false, // Set for attempt metadata created by `t.try()`
always: false
always: false,
}, meta);
}
compareTestSnapshot(options) {
if (!this.snapshots) {
this.snapshots = snapshotManager.load({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir,
recordNewSnapshots: this.recordNewSnapshots,
updating: this.updateSnapshots && !this.runOnlyExclusive && !this.skippingTests
});
this.emit('dependency', this.snapshots.snapPath);
get snapshots() {
if (this._snapshots) {
return this._snapshots;
}
// Lazy load not when the runner is instantiated but when snapshots are
// needed. This should be after the test file has been loaded and source
// maps are available.
const snapshots = loadSnapshots({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir,
recordNewSnapshots: this.recordNewSnapshots,
updating: this.updateSnapshots,
});
if (snapshots.snapPath !== undefined) {
this.emit('dependency', snapshots.snapPath);
}
this._snapshots = snapshots;
return snapshots;
}
compareTestSnapshot(options) {
return this.snapshots.compare(options);
}
skipSnapshot() {
this.skippedSnapshots = true;
skipSnapshot(options) {
return this.snapshots.skipSnapshot(options);
}
saveSnapshotState() {
if (
this.updateSnapshots &&
(
this.runOnlyExclusive ||
this.skippingTests ||
this.skippedSnapshots
)
) {
return {cannotSave: true};
}
if (this.snapshots) {
return {touchedFiles: this.snapshots.save()};
}
if (this.updateSnapshots) {
return {touchedFiles: snapshotManager.cleanSnapshots({
file: this.file,
fixedLocation: this.snapshotDir,
projectDir: this.projectDir
})};
}
return {};
async saveSnapshotState() {
return {touchedFiles: await this.snapshots.save()};
}
onRun(runnable) {
@ -240,16 +236,6 @@ class Runner extends Emittery {
this.activeRunnables.delete(runnable);
}
attributeLeakedError(err) {
for (const runnable of this.activeRunnables) {
if (runnable.attributeLeakedError(err)) {
return true;
}
}
return false;
}
beforeExitHandler() {
for (const runnable of this.activeRunnables) {
runnable.finishDueToInactivity();
@ -269,25 +255,25 @@ class Runner extends Emittery {
};
let waitForSerial = Promise.resolve();
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-reduce
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-array-reduce
if (runnable.metadata.serial || this.serial) {
waitForSerial = previous.then(() => {
waitForSerial = previous.then(() =>
// Serial runnables run as long as there was no previous failure, unless
// the runnable should always be run.
return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
});
(allPassed || runnable.metadata.always) && runAndStoreResult(runnable),
);
return waitForSerial;
}
return Promise.all([
previous,
waitForSerial.then(() => {
waitForSerial.then(() =>
// Concurrent runnables are kicked off after the previous serial
// runnables have completed, as long as there was no previous failure
// (or if the runnable should always be run). One concurrent runnable's
// failure does not prevent the next runnable from running.
return (allPassed || runnable.metadata.always) && runAndStoreResult(runnable);
})
(allPassed || runnable.metadata.always) && runAndStoreResult(runnable),
),
]);
}, waitForSerial);
@ -303,22 +289,22 @@ class Runner extends Emittery {
return result;
}
async runHooks(tasks, contextRef, {titleSuffix, testPassed, associatedTaskIndex} = {}) {
async runHooks(tasks, contextRef, {titleSuffix, testPassed} = {}) {
const hooks = tasks.map(task => new Runnable({
contextRef,
experiments: this.experiments,
failWithoutAssertions: false,
fn: task.args.length === 0 ?
task.implementation :
t => task.implementation.apply(null, [t].concat(task.args)),
fn: task.args.length === 0
? task.implementation
: t => Reflect.apply(task.implementation, null, [t, ...task.args]),
compareTestSnapshot: this.boundCompareTestSnapshot,
skipSnapshot: this.boundSkipSnapshot,
updateSnapshots: this.updateSnapshots,
metadata: {...task.metadata, associatedTaskIndex},
powerAssert: this.powerAssert,
metadata: task.metadata,
title: `${task.title}${titleSuffix || ''}`,
isHook: true,
testPassed
testPassed,
notifyTimeoutUpdate: this.notifyTimeoutUpdate,
}));
const outcome = await this.runMultiple(hooks, this.serial);
for (const result of outcome.storedResults) {
@ -327,7 +313,7 @@ class Runner extends Emittery {
type: 'hook-finished',
title: result.title,
duration: result.duration,
logs: result.logs
logs: result.logs,
});
} else {
this.emit('stateChange', {
@ -335,7 +321,7 @@ class Runner extends Emittery {
title: result.title,
err: serializeError('Hook failure', true, result.error),
duration: result.duration,
logs: result.logs
logs: result.logs,
});
}
}
@ -350,8 +336,7 @@ class Runner extends Emittery {
contextRef,
{
titleSuffix: hookSuffix,
associatedTaskIndex: task.metadata.taskIndex
}
},
);
let testOk = false;
@ -361,16 +346,16 @@ class Runner extends Emittery {
contextRef,
experiments: this.experiments,
failWithoutAssertions: this.failWithoutAssertions,
fn: task.args.length === 0 ?
task.implementation :
t => task.implementation.apply(null, [t].concat(task.args)),
fn: task.args.length === 0
? task.implementation
: t => Reflect.apply(task.implementation, null, [t, ...task.args]),
compareTestSnapshot: this.boundCompareTestSnapshot,
skipSnapshot: this.boundSkipSnapshot,
updateSnapshots: this.updateSnapshots,
metadata: task.metadata,
powerAssert: this.powerAssert,
title: task.title,
registerUniqueTitle: this.registerUniqueTitle
registerUniqueTitle: this.registerUniqueTitle,
notifyTimeoutUpdate: this.notifyTimeoutUpdate,
});
const result = await this.runSingle(test);
@ -382,7 +367,7 @@ class Runner extends Emittery {
title: result.title,
duration: result.duration,
knownFailing: result.metadata.failing,
logs: result.logs
logs: result.logs,
});
hooksOk = await this.runHooks(
@ -391,7 +376,6 @@ class Runner extends Emittery {
{
titleSuffix: hookSuffix,
testPassed: testOk,
associatedTaskIndex: task.metadata.taskIndex
});
} else {
this.emit('stateChange', {
@ -400,7 +384,7 @@ class Runner extends Emittery {
err: serializeError('Test failure', true, result.error, this.file),
duration: result.duration,
knownFailing: result.metadata.failing,
logs: result.logs
logs: result.logs,
});
// Don't run `afterEach` hooks if the test failed.
}
@ -412,20 +396,21 @@ class Runner extends Emittery {
{
titleSuffix: hookSuffix,
testPassed: testOk,
associatedTaskIndex: task.metadata.taskIndex
});
return alwaysOk && hooksOk && testOk;
}
async start() {
async start() { // eslint-disable-line complexity
const concurrentTests = [];
const serialTests = [];
for (const task of this.tasks.serial) {
if (this.runOnlyExclusive && !task.metadata.exclusive) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
@ -434,20 +419,24 @@ class Runner extends Emittery {
title: task.title,
knownFailing: task.metadata.failing,
skip: task.metadata.skipped,
todo: false
todo: false,
});
if (!task.metadata.skipped) {
if (task.metadata.skipped) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
} else {
serialTests.push(task);
}
}
for (const task of this.tasks.concurrent) {
if (this.runOnlyExclusive && !task.metadata.exclusive) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
if (this.checkSelectedByLineNumbers && !task.metadata.selected) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
continue;
}
@ -456,15 +445,15 @@ class Runner extends Emittery {
title: task.title,
knownFailing: task.metadata.failing,
skip: task.metadata.skipped,
todo: false
todo: false,
});
if (!task.metadata.skipped) {
if (this.serial) {
serialTests.push(task);
} else {
concurrentTests.push(task);
}
if (task.metadata.skipped) {
this.snapshots.skipBlock(task.title, task.metadata.taskIndex);
} else if (this.serial) {
serialTests.push(task);
} else {
concurrentTests.push(task);
}
}
@ -482,7 +471,7 @@ class Runner extends Emittery {
title: task.title,
knownFailing: false,
skip: false,
todo: true
todo: true,
});
}
@ -498,13 +487,13 @@ class Runner extends Emittery {
// Note that the hooks and tests always begin running asynchronously.
const beforePromise = this.runHooks(this.tasks.before, contextRef);
const serialPromise = beforePromise.then(beforeHooksOk => { // eslint-disable-line promise/prefer-await-to-then
const serialPromise = beforePromise.then(beforeHooksOk => {
// Don't run tests if a `before` hook failed.
if (!beforeHooksOk) {
return false;
}
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-reduce
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-array-reduce
const previousOk = await previous;
// Don't start tests after an interrupt.
if (this.interrupted) {
@ -520,7 +509,7 @@ class Runner extends Emittery {
return this.runTest(task, contextRef.copy());
}, true);
});
const concurrentPromise = Promise.all([beforePromise, serialPromise]).then(async ([beforeHooksOk, serialOk]) => { // eslint-disable-line promise/prefer-await-to-then
const concurrentPromise = Promise.all([beforePromise, serialPromise]).then(async ([beforeHooksOk, serialOk]) => {
// Don't run tests if a `before` hook failed, or if `failFast` is enabled
// and a previous serial test failed.
if (!beforeHooksOk || (!serialOk && this.failFast)) {
@ -534,9 +523,7 @@ class Runner extends Emittery {
// If a concurrent test fails, even if `failFast` is enabled it won't
// stop other concurrent tests from running.
const allOkays = await Promise.all(concurrentTests.map(task => {
return this.runTest(task, contextRef.copy());
}));
const allOkays = await Promise.all(concurrentTests.map(task => this.runTest(task, contextRef.copy())));
return allOkays.every(ok => ok);
});
@ -563,5 +550,3 @@ class Runner extends Emittery {
this.interrupted = true;
}
}
module.exports = Runner;

53
node_modules/ava/lib/scheduler.js generated vendored Normal file
View file

@ -0,0 +1,53 @@
import fs from 'node:fs';
import path from 'node:path';
import writeFileAtomic from 'write-file-atomic';
import isCi from './is-ci.js';
const FILENAME = 'failing-tests.json';
const scheduler = {
storeFailedTestFiles(runStatus, cacheDir) {
if (isCi || !cacheDir) {
return;
}
try {
writeFileAtomic.sync(path.join(cacheDir, FILENAME), JSON.stringify(runStatus.getFailedTestFiles()));
} catch {}
},
// Order test-files, so that files with failing tests come first
failingTestsFirst(selectedFiles, cacheDir, cacheEnabled) {
if (isCi || cacheEnabled === false) {
return selectedFiles;
}
const filePath = path.join(cacheDir, FILENAME);
let failedTestFiles;
try {
failedTestFiles = JSON.parse(fs.readFileSync(filePath));
} catch {
return selectedFiles;
}
return [...selectedFiles].sort((f, s) => {
if (failedTestFiles.includes(f) && failedTestFiles.includes(s)) {
return 0;
}
if (failedTestFiles.includes(f)) {
return -1;
}
if (failedTestFiles.includes(s)) {
return 1;
}
return 0;
});
},
};
export default scheduler;

View file

@ -1,42 +1,45 @@
'use strict';
const path = require('path');
const cleanYamlObject = require('clean-yaml-object');
const concordance = require('concordance');
const isError = require('is-error');
const slash = require('slash');
const StackUtils = require('stack-utils');
const assert = require('./assert');
const concordanceOptions = require('./concordance-options').default;
import path from 'node:path';
import process from 'node:process';
import {fileURLToPath, pathToFileURL} from 'node:url';
import cleanYamlObject from 'clean-yaml-object';
import concordance from 'concordance';
import isError from 'is-error';
import StackUtils from 'stack-utils';
import {AssertionError} from './assert.js';
import concordanceOptions from './concordance-options.js';
function isAvaAssertionError(source) {
return source instanceof assert.AssertionError;
return source instanceof AssertionError;
}
function filter(propertyName, isRoot) {
return !isRoot || (propertyName !== 'message' && propertyName !== 'name' && propertyName !== 'stack');
}
function normalizeFile(file, ...base) {
return file.startsWith('file://') ? file : pathToFileURL(path.resolve(...base, file)).toString();
}
const stackUtils = new StackUtils();
function extractSource(stack, testFile) {
if (!stack || !testFile) {
return null;
}
// Normalize the test file so it matches `callSite.file`.
const relFile = path.relative(process.cwd(), testFile);
const normalizedFile = process.platform === 'win32' ? slash(relFile) : relFile;
testFile = normalizeFile(testFile);
for (const line of stack.split('\n')) {
try {
const callSite = stackUtils.parseLine(line);
if (callSite.file === normalizedFile) {
return {
isDependency: false,
isWithinProject: true,
file: path.resolve(process.cwd(), callSite.file),
line: callSite.line
};
}
} catch {}
const callSite = stackUtils.parseLine(line);
if (callSite && normalizeFile(callSite.file) === testFile) {
return {
isDependency: false,
isWithinProject: true,
file: testFile,
line: callSite.line,
};
}
}
return null;
@ -52,8 +55,8 @@ function buildSource(source) {
// directory set to the project directory.
const projectDir = process.cwd();
const file = path.resolve(projectDir, source.file.trim());
const rel = path.relative(projectDir, file);
const file = normalizeFile(source.file.trim(), projectDir);
const rel = path.relative(projectDir, fileURLToPath(file));
const [segment] = rel.split(path.sep);
const isWithinProject = segment !== '..' && (process.platform !== 'win32' || !segment.includes(':'));
@ -63,60 +66,59 @@ function buildSource(source) {
isDependency,
isWithinProject,
file,
line: source.line
line: source.line,
};
}
function trySerializeError(err, shouldBeautifyStack, testFile) {
const stack = err.savedError ? err.savedError.stack : err.stack;
function trySerializeError(error, shouldBeautifyStack, testFile) {
const stack = error.savedError ? error.savedError.stack : error.stack;
const retval = {
avaAssertionError: isAvaAssertionError(err),
avaAssertionError: isAvaAssertionError(error),
nonErrorObject: false,
source: extractSource(stack, testFile),
stack,
shouldBeautifyStack
shouldBeautifyStack,
};
if (err.actualStack) {
retval.stack = err.actualStack;
if (error.actualStack) {
retval.stack = error.actualStack;
}
if (retval.avaAssertionError) {
retval.improperUsage = err.improperUsage;
retval.message = err.message;
retval.name = err.name;
retval.statements = err.statements;
retval.values = err.values;
retval.improperUsage = error.improperUsage;
retval.message = error.message;
retval.name = error.name;
retval.values = error.values;
if (err.fixedSource) {
const source = buildSource(err.fixedSource);
if (error.fixedSource) {
const source = buildSource(error.fixedSource);
if (source) {
retval.source = source;
}
}
if (err.assertion) {
retval.assertion = err.assertion;
if (error.assertion) {
retval.assertion = error.assertion;
}
if (err.operator) {
retval.operator = err.operator;
if (error.operator) {
retval.operator = error.operator;
}
} else {
retval.object = cleanYamlObject(err, filter); // Cleanly copy non-standard properties
if (typeof err.message === 'string') {
retval.message = err.message;
retval.object = cleanYamlObject(error, filter); // Cleanly copy non-standard properties
if (typeof error.message === 'string') {
retval.message = error.message;
}
if (typeof err.name === 'string') {
retval.name = err.name;
if (typeof error.name === 'string') {
retval.name = error.name;
}
}
if (typeof err.stack === 'string') {
const lines = err.stack.split('\n');
if (err.name === 'SyntaxError' && !lines[0].startsWith('SyntaxError')) {
if (typeof error.stack === 'string') {
const lines = error.stack.split('\n');
if (error.name === 'SyntaxError' && !lines[0].startsWith('SyntaxError')) {
retval.summary = '';
for (const line of lines) {
retval.summary += line + '\n';
@ -127,11 +129,8 @@ function trySerializeError(err, shouldBeautifyStack, testFile) {
retval.summary = retval.summary.trim();
} else {
// Skip the source line header inserted by `esm`:
// <https://github.com/standard-things/esm/wiki/improved-errors>
const start = lines.findIndex(line => !/:\d+$/.test(line));
retval.summary = '';
for (let index = start; index < lines.length; index++) {
for (let index = 0; index < lines.length; index++) {
if (lines[index].startsWith(' at')) {
break;
}
@ -146,17 +145,17 @@ function trySerializeError(err, shouldBeautifyStack, testFile) {
return retval;
}
function serializeError(origin, shouldBeautifyStack, err, testFile) {
if (!isError(err)) {
export default function serializeError(origin, shouldBeautifyStack, error, testFile) {
if (!isError(error)) {
return {
avaAssertionError: false,
nonErrorObject: true,
formatted: concordance.formatDescriptor(concordance.describe(err, concordanceOptions), concordanceOptions)
formatted: concordance.formatDescriptor(concordance.describe(error, concordanceOptions), concordanceOptions),
};
}
try {
return trySerializeError(err, shouldBeautifyStack, testFile);
return trySerializeError(error, shouldBeautifyStack, testFile);
} catch {
const replacement = new Error(`${origin}: Could not serialize error`);
return {
@ -165,9 +164,7 @@ function serializeError(origin, shouldBeautifyStack, err, testFile) {
name: replacement.name,
message: replacement.message,
stack: replacement.stack,
summary: replacement.message
summary: replacement.message,
};
}
}
module.exports = serializeError;

View file

@ -1,24 +1,24 @@
'use strict';
import {Buffer} from 'node:buffer';
import crypto from 'node:crypto';
import fs from 'node:fs';
import {findSourceMap} from 'node:module';
import path from 'node:path';
import {fileURLToPath} from 'node:url';
import zlib from 'node:zlib';
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
import cbor from 'cbor';
import concordance from 'concordance';
import indentString from 'indent-string';
import mem from 'mem';
import slash from 'slash';
import writeFileAtomic from 'write-file-atomic';
const concordance = require('concordance');
const indentString = require('indent-string');
const md5Hex = require('md5-hex');
const convertSourceMap = require('convert-source-map');
const slash = require('slash');
const writeFileAtomic = require('write-file-atomic');
const mem = require('mem');
const concordanceOptions = require('./concordance-options').snapshotManager;
import {snapshotManager as concordanceOptions} from './concordance-options.js';
// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
// The version is encoded as an unsigned 16 bit integer.
const VERSION = 2;
const VERSION = 3;
const VERSION_HEADER = Buffer.alloc(2);
VERSION_HEADER.writeUInt16LE(VERSION);
@ -28,26 +28,24 @@ const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');
const MD5_HASH_LENGTH = 16;
const SHA_256_HASH_LENGTH = 32;
class SnapshotError extends Error {
export class SnapshotError extends Error {
constructor(message, snapPath) {
super(message);
this.name = 'SnapshotError';
this.snapPath = snapPath;
}
}
exports.SnapshotError = SnapshotError;
class ChecksumError extends SnapshotError {
export class ChecksumError extends SnapshotError {
constructor(snapPath) {
super('Checksum mismatch', snapPath);
this.name = 'ChecksumError';
}
}
exports.ChecksumError = ChecksumError;
class VersionMismatchError extends SnapshotError {
export class VersionMismatchError extends SnapshotError {
constructor(snapPath, version) {
super('Unexpected snapshot version', snapPath);
this.name = 'VersionMismatchError';
@ -55,20 +53,25 @@ class VersionMismatchError extends SnapshotError {
this.expectedVersion = VERSION;
}
}
exports.VersionMismatchError = VersionMismatchError;
export class InvalidSnapshotError extends SnapshotError {
constructor(snapPath) {
super('Invalid snapshot file', snapPath);
this.name = 'InvalidSnapshotError';
}
}
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');
function isLegacySnapshot(buffer) {
return LEGACY_SNAPSHOT_HEADER.equals(buffer.slice(0, LEGACY_SNAPSHOT_HEADER.byteLength));
}
class LegacyError extends SnapshotError {
export class LegacyError extends SnapshotError {
constructor(snapPath) {
super('Legacy snapshot file', snapPath);
this.name = 'LegacyError';
}
}
exports.LegacyError = LegacyError;
function tryRead(file) {
try {
@ -82,168 +85,117 @@ function tryRead(file) {
}
}
function withoutLineEndings(buffer) {
let checkPosition = buffer.byteLength - 1;
while (buffer[checkPosition] === 0x0A || buffer[checkPosition] === 0x0D) {
checkPosition--;
}
function formatEntry(snapshot, index) {
const {
data,
label = `Snapshot ${index + 1}`, // Human-readable labels start counting at 1.
} = snapshot;
return buffer.slice(0, checkPosition + 1);
const description = data
? concordance.formatDescriptor(concordance.deserialize(data), concordanceOptions)
: '<No Data>';
const blockquote = label.split(/\n/).map(line => '> ' + line).join('\n');
return `${blockquote}\n\n${indentString(description, 4)}`;
}
function formatEntry(label, descriptor) {
if (label) {
label = `> ${label}\n\n`;
}
function combineEntries({blocks}) {
const combined = new BufferBuilder();
const codeBlock = indentString(concordance.formatDescriptor(descriptor, concordanceOptions), 4);
return Buffer.from(label + codeBlock, 'utf8');
}
for (const {title, snapshots} of blocks) {
const last = snapshots[snapshots.length - 1];
combined.write(`\n\n## ${title}\n\n`);
function combineEntries(entries) {
const buffers = [];
let byteLength = 0;
for (const [index, snapshot] of snapshots.entries()) {
combined.write(formatEntry(snapshot, index));
const sortedKeys = [...entries.keys()].sort((keyA, keyB) => {
const [a, b] = [entries.get(keyA), entries.get(keyB)];
const taskDifference = a.taskIndex - b.taskIndex;
if (taskDifference !== 0) {
return taskDifference;
}
const [assocA, assocB] = [a.associatedTaskIndex, b.associatedTaskIndex];
if (assocA !== undefined && assocB !== undefined) {
const assocDifference = assocA - assocB;
if (assocDifference !== 0) {
return assocDifference;
}
}
return a.snapIndex - b.snapIndex;
});
for (const key of sortedKeys) {
const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
buffers.push(keyBuffer);
byteLength += keyBuffer.byteLength;
const formattedEntries = entries.get(key).buffers;
const last = formattedEntries[formattedEntries.length - 1];
for (const entry of formattedEntries) {
buffers.push(entry);
byteLength += entry.byteLength;
if (entry !== last) {
buffers.push(REPORT_SEPARATOR);
byteLength += REPORT_SEPARATOR.byteLength;
if (snapshot !== last) {
combined.write(REPORT_SEPARATOR);
}
}
}
return {buffers, byteLength};
return combined;
}
function generateReport(relFile, snapFile, entries) {
const combined = combineEntries(entries);
const {buffers} = combined;
let {byteLength} = combined;
const header = Buffer.from(`# Snapshot report for \`${slash(relFile)}\`
function generateReport(relFile, snapFile, snapshots) {
return new BufferBuilder()
.write(`# Snapshot report for \`${slash(relFile)}\`
The actual snapshot is saved in \`${snapFile}\`.
Generated by [AVA](https://avajs.dev).`, 'utf8');
buffers.unshift(header);
byteLength += header.byteLength;
buffers.push(REPORT_TRAILING_NEWLINE);
byteLength += REPORT_TRAILING_NEWLINE.byteLength;
return Buffer.concat(buffers, byteLength);
Generated by [AVA](https://avajs.dev).`)
.append(combineEntries(snapshots))
.write(REPORT_TRAILING_NEWLINE)
.toBuffer();
}
function appendReportEntries(existingReport, entries) {
const combined = combineEntries(entries);
const {buffers} = combined;
let {byteLength} = combined;
class BufferBuilder {
constructor() {
this.buffers = [];
this.byteOffset = 0;
}
const prepend = withoutLineEndings(existingReport);
buffers.unshift(prepend);
byteLength += prepend.byteLength;
append(builder) {
this.buffers.push(...builder.buffers);
this.byteOffset += builder.byteOffset;
return this;
}
buffers.push(REPORT_TRAILING_NEWLINE);
byteLength += REPORT_TRAILING_NEWLINE.byteLength;
return Buffer.concat(buffers, byteLength);
}
function encodeSnapshots(buffersByHash) {
const buffers = [];
let byteOffset = 0;
// Entry start and end pointers are relative to the header length. This means
// it's possible to append new entries to an existing snapshot file, without
// having to rewrite pointers for existing entries.
const headerLength = Buffer.alloc(4);
buffers.push(headerLength);
byteOffset += 4;
// Allows 65535 hashes (tests or identified snapshots) per file.
const numberHashes = Buffer.alloc(2);
numberHashes.writeUInt16LE(buffersByHash.size);
buffers.push(numberHashes);
byteOffset += 2;
const entries = [];
// Maps can't have duplicate keys, so all items in [...buffersByHash.keys()]
// are unique, so sortedHashes should be deterministic.
const sortedHashes = [...buffersByHash.keys()].sort();
const sortedBuffersByHash = [...sortedHashes.map(hash => [hash, buffersByHash.get(hash)])];
for (const [hash, snapshotBuffers] of sortedBuffersByHash) {
buffers.push(Buffer.from(hash, 'hex'));
byteOffset += MD5_HASH_LENGTH;
// Allows 65535 snapshots per hash.
const numberSnapshots = Buffer.alloc(2);
numberSnapshots.writeUInt16LE(snapshotBuffers.length, 0);
buffers.push(numberSnapshots);
byteOffset += 2;
for (const value of snapshotBuffers) {
// Each pointer is 32 bits, restricting the total, uncompressed buffer to
// 4 GiB.
const start = Buffer.alloc(4);
const end = Buffer.alloc(4);
entries.push({start, end, value});
buffers.push(start, end);
byteOffset += 8;
write(data) {
if (typeof data === 'string') {
this.write(Buffer.from(data, 'utf8'));
} else {
this.buffers.push(data);
this.byteOffset += data.byteLength;
}
return this;
}
headerLength.writeUInt32LE(byteOffset, 0);
let bodyOffset = 0;
for (const entry of entries) {
const start = bodyOffset;
const end = bodyOffset + entry.value.byteLength;
entry.start.writeUInt32LE(start, 0);
entry.end.writeUInt32LE(end, 0);
buffers.push(entry.value);
bodyOffset = end;
toBuffer() {
return Buffer.concat(this.buffers, this.byteOffset);
}
}
byteOffset += bodyOffset;
function sortBlocks(blocksByTitle, blockIndices) {
return [...blocksByTitle].sort(
([aTitle], [bTitle]) => {
const a = blockIndices.get(aTitle);
const b = blockIndices.get(bTitle);
const compressed = zlib.gzipSync(Buffer.concat(buffers, byteOffset));
if (a === undefined) {
if (b === undefined) {
return 0;
}
return 1;
}
if (b === undefined) {
return -1;
}
return a - b;
},
);
}
async function encodeSnapshots(snapshotData) {
const encoded = await cbor.encodeAsync(snapshotData, {
omitUndefinedProperties: true,
canonical: true,
});
const compressed = zlib.gzipSync(encoded);
compressed[9] = 0x03; // Override the GZip header containing the OS to always be Linux
const md5sum = crypto.createHash('md5').update(compressed).digest();
const sha256sum = crypto.createHash('sha256').update(compressed).digest();
return Buffer.concat([
READABLE_PREFIX,
VERSION_HEADER,
md5sum,
compressed
], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + MD5_HASH_LENGTH + compressed.byteLength);
sha256sum,
compressed,
], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + SHA_256_HASH_LENGTH + compressed.byteLength);
}
function decodeSnapshots(buffer, snapPath) {
@ -253,182 +205,207 @@ function decodeSnapshots(buffer, snapPath) {
// The version starts after the readable prefix, which is ended by a newline
// byte (0x0A).
const versionOffset = buffer.indexOf(0x0A) + 1;
const newline = buffer.indexOf(0x0A);
if (newline === -1) {
throw new InvalidSnapshotError(snapPath);
}
const versionOffset = newline + 1;
const version = buffer.readUInt16LE(versionOffset);
if (version !== VERSION) {
throw new VersionMismatchError(snapPath, version);
}
const md5sumOffset = versionOffset + 2;
const compressedOffset = md5sumOffset + MD5_HASH_LENGTH;
const sha256sumOffset = versionOffset + 2;
const compressedOffset = sha256sumOffset + SHA_256_HASH_LENGTH;
const compressed = buffer.slice(compressedOffset);
const md5sum = crypto.createHash('md5').update(compressed).digest();
const expectedSum = buffer.slice(md5sumOffset, compressedOffset);
if (!md5sum.equals(expectedSum)) {
const sha256sum = crypto.createHash('sha256').update(compressed).digest();
const expectedSum = buffer.slice(sha256sumOffset, compressedOffset);
if (!sha256sum.equals(expectedSum)) {
throw new ChecksumError(snapPath);
}
const decompressed = zlib.gunzipSync(compressed);
let byteOffset = 0;
const headerLength = decompressed.readUInt32LE(byteOffset);
byteOffset += 4;
const snapshotsByHash = new Map();
const numberHashes = decompressed.readUInt16LE(byteOffset);
byteOffset += 2;
for (let count = 0; count < numberHashes; count++) {
const hash = decompressed.toString('hex', byteOffset, byteOffset + MD5_HASH_LENGTH);
byteOffset += MD5_HASH_LENGTH;
const numberSnapshots = decompressed.readUInt16LE(byteOffset);
byteOffset += 2;
const snapshotsBuffers = new Array(numberSnapshots);
for (let index = 0; index < numberSnapshots; index++) {
const start = decompressed.readUInt32LE(byteOffset) + headerLength;
byteOffset += 4;
const end = decompressed.readUInt32LE(byteOffset) + headerLength;
byteOffset += 4;
snapshotsBuffers[index] = decompressed.slice(start, end);
}
// Allow for new entries to be appended to an existing header, which could
// lead to the same hash being present multiple times.
if (snapshotsByHash.has(hash)) {
snapshotsByHash.set(hash, snapshotsByHash.get(hash).concat(snapshotsBuffers));
} else {
snapshotsByHash.set(hash, snapshotsBuffers);
}
}
return snapshotsByHash;
return cbor.decode(decompressed);
}
class Manager {
constructor(options) {
this.appendOnly = options.appendOnly;
this.dir = options.dir;
this.recordNewSnapshots = options.recordNewSnapshots;
this.updating = options.updating;
this.relFile = options.relFile;
this.reportFile = options.reportFile;
this.reportPath = options.reportPath;
this.snapFile = options.snapFile;
this.snapPath = options.snapPath;
this.snapshotsByHash = options.snapshotsByHash;
this.oldBlocksByTitle = options.oldBlocksByTitle;
this.newBlocksByTitle = options.newBlocksByTitle;
this.blockIndices = new Map();
this.error = options.error;
this.hasChanges = false;
this.reportEntries = new Map();
}
touch(title, taskIndex) {
this.blockIndices.set(title, taskIndex);
}
compare(options) {
const hash = md5Hex(options.belongsTo);
const entries = this.snapshotsByHash.get(hash) || [];
const snapshotBuffer = entries[options.index];
if (this.error) {
throw this.error;
}
if (!snapshotBuffer) {
const block = this.newBlocksByTitle.get(options.belongsTo);
const snapshot = block && block.snapshots[options.index];
const data = snapshot && snapshot.data;
if (!data) {
if (!this.recordNewSnapshots) {
return {pass: false};
}
if (options.deferRecording) {
const record = this.deferRecord(hash, options);
const record = this.deferRecord(options);
return {pass: true, record};
}
this.record(hash, options);
this.record(options);
return {pass: true};
}
const actual = concordance.deserialize(snapshotBuffer, concordanceOptions);
const actual = concordance.deserialize(data, concordanceOptions);
const expected = concordance.describe(options.expected, concordanceOptions);
const pass = concordance.compareDescriptors(actual, expected);
return {actual, expected, pass};
}
deferRecord(hash, options) {
const descriptor = concordance.describe(options.expected, concordanceOptions);
const snapshot = concordance.serialize(descriptor);
const entry = formatEntry(options.label, descriptor);
const {taskIndex, snapIndex, associatedTaskIndex} = options;
recordSerialized({data, label, belongsTo, index}) {
let block = this.newBlocksByTitle.get(belongsTo);
if (!block) {
block = {snapshots: []};
}
const {snapshots} = block;
if (index > snapshots.length) {
throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, exceeds expected index of ${snapshots.length}`);
} else if (index < snapshots.length) {
if (snapshots[index].data) {
throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, already exists`);
}
snapshots[index] = {data, label};
} else {
snapshots.push({data, label});
}
this.newBlocksByTitle.set(belongsTo, block);
}
deferRecord(options) {
const {expected, belongsTo, label, index} = options;
const descriptor = concordance.describe(expected, concordanceOptions);
const data = concordance.serialize(descriptor);
return () => { // Must be called in order!
this.hasChanges = true;
let snapshots = this.snapshotsByHash.get(hash);
if (!snapshots) {
snapshots = [];
this.snapshotsByHash.set(hash, snapshots);
}
if (options.index > snapshots.length) {
throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, exceeds expected index of ${snapshots.length}`);
}
if (options.index < snapshots.length) {
throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, already exists`);
}
snapshots.push(snapshot);
if (this.reportEntries.has(options.belongsTo)) {
this.reportEntries.get(options.belongsTo).buffers.push(entry);
} else {
this.reportEntries.set(options.belongsTo, {buffers: [entry], taskIndex, snapIndex, associatedTaskIndex});
}
this.recordSerialized({data, label, belongsTo, index});
};
}
record(hash, options) {
const record = this.deferRecord(hash, options);
record(options) {
const record = this.deferRecord(options);
record();
}
save() {
skipBlock(title) {
const block = this.oldBlocksByTitle.get(title);
if (block) {
this.newBlocksByTitle.set(title, block);
}
}
skipSnapshot({belongsTo, index, deferRecording}) {
const oldBlock = this.oldBlocksByTitle.get(belongsTo);
let snapshot = oldBlock && oldBlock.snapshots[index];
if (!snapshot) {
snapshot = {};
}
// Retain the label from the old snapshot, so as not to assume that the
// snapshot.skip() arguments are well-formed.
// Defer recording if called in a try().
if (deferRecording) {
return () => { // Must be called in order!
this.recordSerialized({belongsTo, index, ...snapshot});
};
}
this.recordSerialized({belongsTo, index, ...snapshot});
}
async save() {
const {dir, relFile, snapFile, snapPath, reportPath} = this;
if (this.updating && this.newBlocksByTitle.size === 0) {
return {
changedFiles: [cleanFile(snapPath), cleanFile(reportPath)].flat(),
temporaryFiles: [],
};
}
if (!this.hasChanges) {
return null;
}
const {snapPath} = this;
const buffer = encodeSnapshots(this.snapshotsByHash);
const snapshots = {
blocks: sortBlocks(this.newBlocksByTitle, this.blockIndices).map(
([title, block]) => ({title, ...block}),
),
};
const reportPath = path.join(this.dir, this.reportFile);
const existingReport = this.appendOnly ? tryRead(reportPath) : null;
const reportBuffer = existingReport ?
appendReportEntries(existingReport, this.reportEntries) :
generateReport(this.relFile, this.snapFile, this.reportEntries);
const buffer = await encodeSnapshots(snapshots);
const reportBuffer = generateReport(relFile, snapFile, snapshots);
fs.mkdirSync(this.dir, {recursive: true});
await fs.promises.mkdir(dir, {recursive: true});
const paths = [snapPath, reportPath];
const tmpfileCreated = tmpfile => paths.push(tmpfile);
writeFileAtomic.sync(snapPath, buffer, {tmpfileCreated});
writeFileAtomic.sync(reportPath, reportBuffer, {tmpfileCreated});
return paths;
const temporaryFiles = [];
const tmpfileCreated = file => temporaryFiles.push(file);
await Promise.all([
writeFileAtomic(snapPath, buffer, {tmpfileCreated}),
writeFileAtomic(reportPath, reportBuffer, {tmpfileCreated}),
]);
return {
changedFiles: [snapPath, reportPath],
temporaryFiles,
};
}
}
const resolveSourceFile = mem(file => {
const testDir = path.dirname(file);
const buffer = tryRead(file);
if (!buffer) {
return file; // Assume the file is stubbed in our test suite.
const sourceMap = findSourceMap(file);
if (sourceMap === undefined) {
return file;
}
const source = buffer.toString();
const converter = convertSourceMap.fromSource(source) || convertSourceMap.fromMapFileSource(source, testDir);
if (converter) {
const map = converter.toObject();
const firstSource = `${map.sourceRoot || ''}${map.sources[0]}`;
return path.resolve(testDir, firstSource);
const {payload} = sourceMap;
if (payload.sources.length === 0) { // Hypothetical?
return file;
}
return file;
return payload.sources[0].startsWith('file://')
? fileURLToPath(payload.sources[0])
: payload.sources[0];
});
const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
export const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
const testDir = path.dirname(resolveSourceFile(file));
if (fixedLocation) {
const relativeTestLocation = path.relative(projectDir, testDir);
@ -447,8 +424,6 @@ const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
return testDir;
}, {cacheKey: ([{file}]) => file});
exports.determineSnapshotDir = determineSnapshotDir;
function determineSnapshotPaths({file, fixedLocation, projectDir}) {
const dir = determineSnapshotDir({file, fixedLocation, projectDir});
const relFile = path.relative(projectDir, resolveSourceFile(file));
@ -460,7 +435,9 @@ function determineSnapshotPaths({file, fixedLocation, projectDir}) {
dir,
relFile,
snapFile,
reportFile
reportFile,
snapPath: path.join(dir, snapFile),
reportPath: path.join(dir, reportFile),
};
}
@ -477,45 +454,52 @@ function cleanFile(file) {
}
}
// Remove snapshot and report if they exist. Returns an array containing the
// paths of the touched files.
function cleanSnapshots({file, fixedLocation, projectDir}) {
const {dir, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
export function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
// Keep runner unit tests that use `new Runner()` happy
if (file === undefined || projectDir === undefined) {
return new Manager({
recordNewSnapshots,
updating,
oldBlocksByTitle: new Map(),
newBlocksByTitle: new Map(),
});
}
return [
...cleanFile(path.join(dir, snapFile)),
...cleanFile(path.join(dir, reportFile))
];
}
const paths = determineSnapshotPaths({file, fixedLocation, projectDir});
const buffer = tryRead(paths.snapPath);
exports.cleanSnapshots = cleanSnapshots;
if (!buffer) {
return new Manager({
recordNewSnapshots,
updating,
...paths,
oldBlocksByTitle: new Map(),
newBlocksByTitle: new Map(),
});
}
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
const {dir, relFile, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
const snapPath = path.join(dir, snapFile);
let blocksByTitle;
let snapshotError;
let appendOnly = !updating;
let snapshotsByHash;
try {
const data = decodeSnapshots(buffer, paths.snapPath);
blocksByTitle = new Map(data.blocks.map(({title, ...block}) => [title, block]));
} catch (error) {
blocksByTitle = new Map();
if (!updating) {
const buffer = tryRead(snapPath);
if (buffer) {
snapshotsByHash = decodeSnapshots(buffer, snapPath);
} else {
appendOnly = false;
if (!updating) { // Discard all decoding errors when updating snapshots
snapshotError = error instanceof SnapshotError
? error
: new InvalidSnapshotError(paths.snapPath);
}
}
return new Manager({
appendOnly,
dir,
recordNewSnapshots,
relFile,
reportFile,
snapFile,
snapPath,
snapshotsByHash: snapshotsByHash || new Map()
updating,
...paths,
oldBlocksByTitle: blocksByTitle,
newBlocksByTitle: updating ? new Map() : blocksByTitle,
error: snapshotError,
});
}
exports.load = load;

422
node_modules/ava/lib/test.js generated vendored
View file

@ -1,11 +1,11 @@
'use strict';
const concordance = require('concordance');
const isPromise = require('is-promise');
const plur = require('plur');
const assert = require('./assert');
const nowAndTimers = require('./now-and-timers');
const parseTestArgs = require('./parse-test-args');
const concordanceOptions = require('./concordance-options').default;
import concordance from 'concordance';
import isPromise from 'is-promise';
import plur from 'plur';
import {AssertionError, Assertions, checkAssertionMessage} from './assert.js';
import concordanceOptions from './concordance-options.js';
import nowAndTimers from './now-and-timers.cjs';
import parseTestArgs from './parse-test-args.js';
function formatErrorValue(label, error) {
const formatted = concordance.format(error, concordanceOptions);
@ -15,13 +15,13 @@ function formatErrorValue(label, error) {
const captureSavedError = () => {
const limitBefore = Error.stackTraceLimit;
Error.stackTraceLimit = 1;
const err = new Error();
const error = new Error(); // eslint-disable-line unicorn/error-message
Error.stackTraceLimit = limitBefore;
return err;
return error;
};
const testMap = new WeakMap();
class ExecutionContext extends assert.Assertions {
class ExecutionContext extends Assertions {
constructor(test) {
super({
pass: () => {
@ -30,18 +30,15 @@ class ExecutionContext extends assert.Assertions {
pending: promise => {
test.addPendingAssertion(promise);
},
fail: err => {
test.addFailedAssertion(err);
fail: error => {
test.addFailedAssertion(error);
},
skip: () => {
test.countPassedAssertion();
},
compareWithSnapshot: options => {
return test.compareWithSnapshot(options);
},
powerAssert: test.powerAssert,
compareWithSnapshot: options => test.compareWithSnapshot(options),
experiments: test.experiments,
disableSnapshots: test.isHook === true
disableSnapshots: test.isHook === true,
});
testMap.set(this, test);
@ -50,11 +47,9 @@ class ExecutionContext extends assert.Assertions {
};
this.log = (...inputArgs) => {
const args = inputArgs.map(value => {
return typeof value === 'string' ?
value :
concordance.format(value, concordanceOptions);
});
const args = inputArgs.map(value => typeof value === 'string'
? value
: concordance.format(value, concordanceOptions));
if (args.length > 0) {
test.addLog(args.join(' '));
}
@ -81,99 +76,89 @@ class ExecutionContext extends assert.Assertions {
throw error;
}
const {args, buildTitle, implementations, receivedImplementationArray} = parseTestArgs(attemptArgs);
const {args, implementation, title} = parseTestArgs(attemptArgs);
if (implementations.length === 0) {
if (!implementation) {
throw new TypeError('Expected an implementation.');
}
const attemptPromises = implementations.map((implementation, index) => {
let {title, isSet, isValid, isEmpty} = buildTitle(implementation);
if (Array.isArray(implementation)) {
throw new TypeError('AVA 4 no longer supports t.try() with multiple implementations.');
}
if (!isSet || isEmpty) {
title = `${test.title} ─ attempt ${test.attemptCount + 1 + index}`;
} else if (isValid) {
title = `${test.title}${title}`;
} else {
throw new TypeError('`t.try()` titles must be strings'); // Throw synchronously!
}
let attemptTitle;
if (!title.isSet || title.isEmpty) {
attemptTitle = `${test.title} ─ attempt ${test.attemptCount + 1}`;
} else if (title.isValid) {
attemptTitle = `${test.title}${title.value}`;
} else {
throw new TypeError('`t.try()` titles must be strings');
}
if (!test.registerUniqueTitle(title)) {
throw new Error(`Duplicate test title: ${title}`);
}
if (!test.registerUniqueTitle(attemptTitle)) {
throw new Error(`Duplicate test title: ${attemptTitle}`);
}
return {implementation, title};
}).map(async ({implementation, title}) => {
let committed = false;
let discarded = false;
let committed = false;
let discarded = false;
const {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount} = await test.runAttempt(title, t => implementation(t, ...args));
const {assertCount, deferredSnapshotRecordings, errors, logs, passed, snapshotCount, startingSnapshotCount} = await test.runAttempt(attemptTitle, t => implementation(t, ...args));
return {
errors,
logs: [...logs], // Don't allow modification of logs.
passed,
title,
commit: ({retainLogs = true} = {}) => {
if (committed) {
return;
}
if (discarded) {
test.saveFirstError(new Error('Cant commit a result that was previously discarded'));
return;
}
committed = true;
test.finishAttempt({
assertCount,
commit: true,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount
});
},
discard: ({retainLogs = false} = {}) => {
if (committed) {
test.saveFirstError(new Error('Cant discard a result that was previously committed'));
return;
}
if (discarded) {
return;
}
discarded = true;
test.finishAttempt({
assertCount: 0,
commit: false,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount
});
return {
errors,
logs: [...logs], // Don't allow modification of logs.
passed,
title: attemptTitle,
commit: ({retainLogs = true} = {}) => {
if (committed) {
return;
}
};
});
const results = await Promise.all(attemptPromises);
return receivedImplementationArray ? results : results[0];
if (discarded) {
test.saveFirstError(new Error('Cant commit a result that was previously discarded'));
return;
}
committed = true;
test.finishAttempt({
assertCount,
commit: true,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount,
});
},
discard: ({retainLogs = false} = {}) => {
if (committed) {
test.saveFirstError(new Error('Cant discard a result that was previously committed'));
return;
}
if (discarded) {
return;
}
discarded = true;
test.finishAttempt({
assertCount: 0,
commit: false,
deferredSnapshotRecordings,
errors,
logs,
passed,
retainLogs,
snapshotCount,
startingSnapshotCount,
});
},
};
};
}
get end() {
const end = testMap.get(this).bindEndCallback();
const endFn = error => end(error, captureSavedError());
return endFn;
}
get title() {
return testMap.get(this).title;
}
@ -190,17 +175,9 @@ class ExecutionContext extends assert.Assertions {
const test = testMap.get(this);
return test.isHook ? test.testPassed : !test.assertError;
}
_throwsArgStart(assertion, file, line) {
testMap.get(this).trackThrows({assertion, file, line});
}
_throwsArgEnd() {
testMap.get(this).trackThrows(null);
}
}
class Test {
export default class Test {
constructor(options) {
this.contextRef = options.contextRef;
this.experiments = options.experiments || {};
@ -208,12 +185,12 @@ class Test {
this.fn = options.fn;
this.isHook = options.isHook === true;
this.metadata = options.metadata;
this.powerAssert = options.powerAssert;
this.title = options.title;
this.testPassed = options.testPassed;
this.registerUniqueTitle = options.registerUniqueTitle;
this.logs = [];
this.teardowns = [];
this.notifyTimeoutUpdate = options.notifyTimeoutUpdate;
const {snapshotBelongsTo = this.title, nextSnapshotIndex = 0} = options;
this.snapshotBelongsTo = snapshotBelongsTo;
@ -222,24 +199,20 @@ class Test {
const deferRecording = this.metadata.inline;
this.deferredSnapshotRecordings = [];
this.compareWithSnapshot = ({expected, id, message}) => {
this.compareWithSnapshot = ({expected, message}) => {
this.snapshotCount++;
// TODO: In a breaking change, reject non-undefined, falsy IDs and messages.
const belongsTo = id || snapshotBelongsTo;
const index = id ? 0 : this.nextSnapshotIndex++;
const label = id ? '' : message || `Snapshot ${index + 1}`; // Human-readable labels start counting at 1.
const belongsTo = snapshotBelongsTo;
const index = this.nextSnapshotIndex++;
const label = message;
const {taskIndex, associatedTaskIndex} = this.metadata;
const {record, ...result} = options.compareTestSnapshot({
belongsTo,
deferRecording,
expected,
index,
label,
taskIndex,
snapIndex: this.snapshotCount,
associatedTaskIndex
taskIndex: this.metadata.taskIndex,
});
if (record) {
this.deferredSnapshotRecordings.push(record);
@ -250,16 +223,20 @@ class Test {
this.skipSnapshot = () => {
if (typeof options.skipSnapshot === 'function') {
options.skipSnapshot();
const record = options.skipSnapshot({
belongsTo: snapshotBelongsTo,
index: this.nextSnapshotIndex,
deferRecording,
taskIndex: this.metadata.taskIndex,
});
if (record) {
this.deferredSnapshotRecordings.push(record);
}
}
if (options.updateSnapshots) {
this.addFailedAssertion(new Error('Snapshot assertions cannot be skipped when updating snapshots'));
} else {
this.nextSnapshotIndex++;
this.snapshotCount++;
this.countPassedAssertion();
}
this.nextSnapshotIndex++;
this.snapshotCount++;
this.countPassedAssertion();
};
this.runAttempt = async (title, fn) => {
@ -274,11 +251,11 @@ class Test {
const attempt = new Test({
...options,
fn,
metadata: {...options.metadata, callback: false, failing: false, inline: true},
metadata: {...options.metadata, failing: false, inline: true},
contextRef: contextRef.copy(),
snapshotBelongsTo,
nextSnapshotIndex,
title
title,
});
const {deferredSnapshotRecordings, error, logs, passed, assertCount, snapshotCount} = await attempt.run();
@ -291,53 +268,17 @@ class Test {
this.attemptCount = 0;
this.calledEnd = false;
this.duration = null;
this.endCallbackFinisher = null;
this.finishDueToAttributedError = null;
this.finishDueToInactivity = null;
this.finishDueToTimeout = null;
this.finishing = false;
this.pendingAssertionCount = 0;
this.pendingAttemptCount = 0;
this.pendingThrowsAssertion = null;
this.planCount = null;
this.startedAt = 0;
this.timeoutMs = 0;
this.timeoutTimer = null;
}
bindEndCallback() {
if (this.metadata.callback) {
return (error, savedError) => {
this.endCallback(error, savedError);
};
}
const error_ = this.metadata.inline ? new Error('`t.end()` is not supported inside `t.try()`') : new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
throw error_;
}
endCallback(error, savedError) {
if (this.calledEnd) {
this.saveFirstError(new Error('`t.end()` called more than once'));
return;
}
this.calledEnd = true;
if (error) {
this.saveFirstError(new assert.AssertionError({
actual: error,
message: 'Callback called with an error',
savedError,
values: [formatErrorValue('Callback called with an error:', error)]
}));
}
if (this.endCallbackFinisher) {
this.endCallbackFinisher();
}
}
createExecutionContext() {
return new ExecutionContext(this);
}
@ -374,7 +315,7 @@ class Test {
promise
.catch(error => this.saveFirstError(error))
.then(() => { // eslint-disable-line promise/prefer-await-to-then
.then(() => {
this.pendingAssertionCount--;
this.refreshTimeout();
});
@ -451,7 +392,7 @@ class Test {
}
timeout(ms, message) {
const result = assert.checkAssertionMessage('timeout', message);
const result = checkAssertionMessage('timeout', message);
if (result !== true) {
this.saveFirstError(result);
// Allow the timeout to be set even when the message is invalid.
@ -471,6 +412,8 @@ class Test {
this.finishDueToTimeout();
}
}, ms);
this.notifyTimeoutUpdate(this.timeoutMs);
}
refreshTimeout() {
@ -509,11 +452,7 @@ class Test {
}
async runTeardowns() {
const teardowns = [...this.teardowns];
if (this.experiments.reverseTeardowns) {
teardowns.reverse();
}
const teardowns = [...this.teardowns].reverse();
for (const teardown of teardowns) {
try {
@ -526,11 +465,11 @@ class Test {
verifyPlan() {
if (!this.assertError && this.planCount !== null && this.planCount !== this.assertCount) {
this.saveFirstError(new assert.AssertionError({
this.saveFirstError(new AssertionError({
assertion: 'plan',
message: `Planned for ${this.planCount} ${plur('assertion', this.planCount)}, but got ${this.assertCount}.`,
operator: '===',
savedError: this.planError
savedError: this.planError,
}));
}
}
@ -561,70 +500,16 @@ class Test {
}
}
trackThrows(pending) {
this.pendingThrowsAssertion = pending;
}
detectImproperThrows(error) {
if (!this.pendingThrowsAssertion) {
return false;
}
const pending = this.pendingThrowsAssertion;
this.pendingThrowsAssertion = null;
const values = [];
if (error) {
values.push(formatErrorValue(`The following error was thrown, possibly before \`t.${pending.assertion}()\` could be called:`, error));
}
this.saveFirstError(new assert.AssertionError({
assertion: pending.assertion,
fixedSource: {file: pending.file, line: pending.line},
improperUsage: true,
message: `Improper usage of \`t.${pending.assertion}()\` detected`,
savedError: error instanceof Error && error,
values
}));
return true;
}
waitForPendingThrowsAssertion() {
return new Promise(resolve => {
this.finishDueToAttributedError = () => {
resolve(this.finish());
};
this.finishDueToInactivity = () => {
this.detectImproperThrows();
resolve(this.finish());
};
// Wait up to a second to see if an error can be attributed to the
// pending assertion.
nowAndTimers.setTimeout(() => this.finishDueToInactivity(), 1000).unref();
});
}
attributeLeakedError(error) {
if (!this.detectImproperThrows(error)) {
return false;
}
this.finishDueToAttributedError();
return true;
}
callFn() {
try {
return {
ok: true,
retval: this.fn.call(null, this.createExecutionContext())
retval: this.fn.call(null, this.createExecutionContext()),
};
} catch (error) {
return {
ok: false,
error
error,
};
}
}
@ -634,13 +519,11 @@ class Test {
const result = this.callFn();
if (!result.ok) {
if (!this.detectImproperThrows(result.error)) {
this.saveFirstError(new assert.AssertionError({
message: 'Error thrown in test',
savedError: result.error instanceof Error && result.error,
values: [formatErrorValue('Error thrown in test:', result.error)]
}));
}
this.saveFirstError(new AssertionError({
message: 'Error thrown in test',
savedError: result.error instanceof Error && result.error,
values: [formatErrorValue('Error thrown in test:', result.error)],
}));
return this.finish();
}
@ -653,7 +536,7 @@ class Test {
promise = new Promise((resolve, reject) => {
result.retval.subscribe({
error: reject,
complete: () => resolve()
complete: () => resolve(),
});
});
} else if (returnedPromise) {
@ -661,37 +544,6 @@ class Test {
promise = Promise.resolve(result.retval);
}
if (this.metadata.callback) {
if (returnedObservable || returnedPromise) {
const asyncType = returnedObservable ? 'observables' : 'promises';
this.saveFirstError(new Error(`Do not return ${asyncType} from tests declared via \`test.cb(…)\`. Use \`test.cb(…)\` for legacy callback APIs. When using promises, observables or async functions, use \`test(…)\`.`));
return this.finish();
}
if (this.calledEnd) {
return this.finish();
}
return new Promise(resolve => {
this.endCallbackFinisher = () => {
resolve(this.finish());
};
this.finishDueToAttributedError = () => {
resolve(this.finish());
};
this.finishDueToTimeout = () => {
resolve(this.finish());
};
this.finishDueToInactivity = () => {
this.saveFirstError(new Error('`t.end()` was never called'));
resolve(this.finish());
};
});
}
if (promise) {
return new Promise(resolve => {
this.finishDueToAttributedError = () => {
@ -703,24 +555,22 @@ class Test {
};
this.finishDueToInactivity = () => {
const error = returnedObservable ?
new Error('Observable returned by test never completed') :
new Error('Promise returned by test never resolved');
const error = returnedObservable
? new Error('Observable returned by test never completed')
: new Error('Promise returned by test never resolved');
this.saveFirstError(error);
resolve(this.finish());
};
promise
.catch(error => {
if (!this.detectImproperThrows(error)) {
this.saveFirstError(new assert.AssertionError({
message: 'Rejected promise returned by test',
savedError: error instanceof Error && error,
values: [formatErrorValue('Rejected promise returned by test. Reason:', error)]
}));
}
this.saveFirstError(new AssertionError({
message: 'Rejected promise returned by test',
savedError: error instanceof Error && error,
values: [formatErrorValue('Rejected promise returned by test. Reason:', error)],
}));
})
.then(() => resolve(this.finish())); // eslint-disable-line promise/prefer-await-to-then
.then(() => resolve(this.finish()));
});
}
@ -730,10 +580,6 @@ class Test {
async finish() {
this.finishing = true;
if (!this.assertError && this.pendingThrowsAssertion) {
return this.waitForPendingThrowsAssertion();
}
this.clearTimeout();
this.verifyPlan();
this.verifyAssertions();
@ -759,9 +605,7 @@ class Test {
passed,
snapshotCount: this.snapshotCount,
assertCount: this.assertCount,
title: this.title
title: this.title,
};
}
}
module.exports = Test;

111
node_modules/ava/lib/watcher.js generated vendored
View file

@ -1,18 +1,26 @@
'use strict';
const nodePath = require('path');
const debug = require('debug')('ava:watcher');
const chokidar = require('chokidar');
const diff = require('lodash/difference');
const flatten = require('lodash/flatten');
const chalk = require('./chalk').get();
const {applyTestFileFilter, classify, getChokidarIgnorePatterns} = require('./globs');
const {levels: providerLevels} = require('./provider-manager');
import nodePath from 'node:path';
function rethrowAsync(err) {
import chokidar_ from 'chokidar';
import createDebug from 'debug';
import {chalk} from './chalk.js';
import {applyTestFileFilter, classify, getChokidarIgnorePatterns} from './globs.js';
let chokidar = chokidar_;
export function _testOnlyReplaceChokidar(replacement) {
chokidar = replacement;
}
let debug = createDebug('ava:watcher');
export function _testOnlyReplaceDebug(replacement) {
debug = replacement('ava:watcher');
}
function rethrowAsync(error) {
// Don't swallow exceptions. Note that any
// expected error should already have been logged
setImmediate(() => {
throw err;
throw error;
});
}
@ -77,7 +85,7 @@ class TestDependency {
}
}
class Watcher {
export default class Watcher {
constructor({api, filter = [], globs, projectDir, providers, reporter}) {
this.debouncer = new Debouncer(this);
@ -88,7 +96,7 @@ class Watcher {
const patternFilters = filter.map(({pattern}) => pattern);
this.providers = providers.filter(({level}) => level >= providerLevels.pathRewrites);
this.providers = providers;
this.run = (specificFiles = [], updateSnapshots = false) => {
const clearLogOnNextRun = this.clearLogOnNextRun && this.runVector > 0;
if (this.runVector > 0) {
@ -104,12 +112,18 @@ class Watcher {
if (runOnlyExclusive) {
// The test files that previously contained exclusive tests are always
// run, together with the remaining specific files.
const remainingFiles = diff(specificFiles, exclusiveFiles);
specificFiles = this.filesWithExclusiveTests.concat(remainingFiles);
const remainingFiles = specificFiles.filter(file => !exclusiveFiles.includes(file));
specificFiles = [...this.filesWithExclusiveTests, ...remainingFiles];
}
if (filter.length > 0) {
specificFiles = applyTestFileFilter({cwd: projectDir, filter: patternFilters, testFiles: specificFiles});
specificFiles = applyTestFileFilter({
cwd: projectDir,
expandDirectories: false,
filter: patternFilters,
testFiles: specificFiles,
treatFilterPatternsAsFiles: false,
});
}
this.pruneFailures(specificFiles);
@ -125,21 +139,21 @@ class Watcher {
previousFailures: this.sumPreviousFailures(this.runVector),
runOnlyExclusive,
runVector: this.runVector,
updateSnapshots: updateSnapshots === true
}
updateSnapshots: updateSnapshots === true,
},
})
.then(runStatus => { // eslint-disable-line promise/prefer-await-to-then
.then(runStatus => {
reporter.endRun();
reporter.lineWriter.writeLine(END_MESSAGE);
if (this.clearLogOnNextRun && (
runStatus.stats.failedHooks > 0 ||
runStatus.stats.failedTests > 0 ||
runStatus.stats.failedWorkers > 0 ||
runStatus.stats.internalErrors > 0 ||
runStatus.stats.timeouts > 0 ||
runStatus.stats.uncaughtExceptions > 0 ||
runStatus.stats.unhandledRejections > 0
runStatus.stats.failedHooks > 0
|| runStatus.stats.failedTests > 0
|| runStatus.stats.failedWorkers > 0
|| runStatus.stats.internalErrors > 0
|| runStatus.stats.timeouts > 0
|| runStatus.stats.uncaughtExceptions > 0
|| runStatus.stats.unhandledRejections > 0
)) {
this.clearLogOnNextRun = false;
}
@ -150,6 +164,7 @@ class Watcher {
this.testDependencies = [];
this.trackTestDependencies(api);
this.temporaryFiles = new Set();
this.touchedFiles = new Set();
this.trackTouchedFiles(api);
@ -168,7 +183,7 @@ class Watcher {
chokidar.watch(['**/*'], {
cwd: this.globs.cwd,
ignored: getChokidarIgnorePatterns(this.globs),
ignoreInitial: true
ignoreInitial: true,
}).on('all', (event, path) => {
if (event === 'add' || event === 'change' || event === 'unlink') {
debug('Detected %s of %s', event, path);
@ -231,9 +246,13 @@ class Watcher {
return;
}
for (const file of evt.files) {
for (const file of evt.files.changedFiles) {
this.touchedFiles.add(file);
}
for (const file of evt.files.temporaryFiles) {
this.temporaryFiles.add(file);
}
});
});
}
@ -307,7 +326,7 @@ class Watcher {
this.filesWithFailures.push({
file,
vector,
count: 1
count: 1,
});
}
}
@ -379,6 +398,14 @@ class Watcher {
return false;
}
// Unlike touched files, temporary files are never cleared. We may see
// adds and unlinks detected separately, so we track the temporary files
// as long as AVA is running.
if (this.temporaryFiles.has(path)) {
debug('Ignoring known temporary file %s', path);
return false;
}
return true;
});
@ -394,21 +421,23 @@ class Watcher {
}
const dirtyHelpersAndSources = [];
const dirtyTests = [];
const addedOrChangedTests = [];
const unlinkedTests = [];
for (const filePath of dirtyPaths) {
const {isIgnoredByWatcher, isTest} = classify(filePath, this.globs);
if (!isIgnoredByWatcher) {
if (isTest) {
dirtyTests.push(filePath);
if (dirtyStates[filePath] === 'unlink') {
unlinkedTests.push(filePath);
} else {
addedOrChangedTests.push(filePath);
}
} else {
dirtyHelpersAndSources.push(filePath);
}
}
}
const addedOrChangedTests = dirtyTests.filter(path => dirtyStates[path] !== 'unlink');
const unlinkedTests = diff(dirtyTests, addedOrChangedTests);
this.cleanUnlinkedTests(unlinkedTests);
// No need to rerun tests if the only change is that tests were deleted
@ -423,12 +452,10 @@ class Watcher {
}
// Try to find tests that depend on the changed source files
const testsByHelpersOrSource = dirtyHelpersAndSources.map(path => {
return this.testDependencies.filter(dep => dep.contains(path)).map(dep => {
debug('%s is a dependency of %s', path, dep.file);
return dep.file;
});
}, this).filter(tests => tests.length > 0);
const testsByHelpersOrSource = dirtyHelpersAndSources.map(path => this.testDependencies.filter(dep => dep.contains(path)).map(dep => {
debug('%s is a dependency of %s', path, dep.file);
return dep.file;
})).filter(tests => tests.length > 0);
// Rerun all tests if source files were changed that could not be traced to
// specific tests
@ -440,8 +467,6 @@ class Watcher {
}
// Run all affected tests
this.run([...new Set(addedOrChangedTests.concat(flatten(testsByHelpersOrSource)))]);
this.run([...new Set([addedOrChangedTests, testsByHelpersOrSource].flat(2))]);
}
}
module.exports = Watcher;

208
node_modules/ava/lib/worker/base.js generated vendored Normal file
View file

@ -0,0 +1,208 @@
import {createRequire} from 'node:module';
import process from 'node:process';
import {pathToFileURL} from 'node:url';
import {workerData} from 'node:worker_threads';
import setUpCurrentlyUnhandled from 'currently-unhandled';
import {set as setChalk} from '../chalk.js';
import nowAndTimers from '../now-and-timers.cjs';
import providerManager from '../provider-manager.js';
import Runner from '../runner.js';
import serializeError from '../serialize-error.js';
import channel from './channel.cjs';
import dependencyTracking from './dependency-tracker.js';
import lineNumberSelection from './line-numbers.js';
import {set as setOptions} from './options.cjs';
import {flags, refs, sharedWorkerTeardowns} from './state.cjs';
import {isRunningInThread, isRunningInChildProcess} from './utils.cjs';
const currentlyUnhandled = setUpCurrentlyUnhandled();
const run = async options => {
setOptions(options);
setChalk(options.chalkOptions);
if (options.chalkOptions.level > 0) {
const {stdout, stderr} = process;
global.console = Object.assign(global.console, new console.Console({stdout, stderr, colorMode: true}));
}
async function exit(code) {
if (!process.exitCode) {
process.exitCode = code;
}
dependencyTracking.flush();
await channel.flush();
process.exit(); // eslint-disable-line unicorn/no-process-exit
}
let checkSelectedByLineNumbers;
try {
checkSelectedByLineNumbers = lineNumberSelection({
file: options.file,
lineNumbers: options.lineNumbers,
});
} catch (error) {
channel.send({type: 'line-number-selection-error', err: serializeError('Line number selection error', false, error, options.file)});
checkSelectedByLineNumbers = () => false;
}
const runner = new Runner({
checkSelectedByLineNumbers,
experiments: options.experiments,
failFast: options.failFast,
failWithoutAssertions: options.failWithoutAssertions,
file: options.file,
match: options.match,
projectDir: options.projectDir,
recordNewSnapshots: options.recordNewSnapshots,
runOnlyExclusive: options.runOnlyExclusive,
serial: options.serial,
snapshotDir: options.snapshotDir,
updateSnapshots: options.updateSnapshots,
});
refs.runnerChain = runner.chain;
channel.peerFailed.then(() => {
runner.interrupt();
});
runner.on('dependency', dependencyTracking.track);
runner.on('stateChange', state => channel.send(state));
runner.on('error', error => {
channel.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
});
runner.on('finish', async () => {
try {
const {touchedFiles} = await runner.saveSnapshotState();
if (touchedFiles) {
channel.send({type: 'touched-files', files: touchedFiles});
}
} catch (error) {
channel.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
return;
}
try {
await Promise.all(sharedWorkerTeardowns.map(fn => fn()));
} catch (error) {
channel.send({type: 'uncaught-exception', err: serializeError('Shared worker teardown error', false, error, runner.file)});
exit(1);
return;
}
nowAndTimers.setImmediate(() => {
for (const rejection of currentlyUnhandled()) {
channel.send({type: 'unhandled-rejection', err: serializeError('Unhandled rejection', true, rejection.reason, runner.file)});
}
exit(0);
});
});
process.on('uncaughtException', error => {
channel.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
});
// Store value to prevent required modules from modifying it.
const testPath = options.file;
const extensionsToLoadAsModules = Object.entries(options.moduleTypes)
.filter(([, type]) => type === 'module')
.map(([extension]) => extension);
// Install before processing options.require, so if helpers are added to the
// require configuration the *compiled* helper will be loaded.
const {projectDir, providerStates = []} = options;
const providers = [];
await Promise.all(providerStates.map(async ({type, state}) => {
if (type === 'typescript') {
const provider = await providerManager.typescript(projectDir);
providers.push(provider.worker({extensionsToLoadAsModules, state}));
}
}));
const require = createRequire(import.meta.url);
const load = async ref => {
for (const provider of providers) {
if (provider.canLoad(ref)) {
return provider.load(ref, {requireFn: require});
}
}
for (const extension of extensionsToLoadAsModules) {
if (ref.endsWith(`.${extension}`)) {
return import(pathToFileURL(ref)); // eslint-disable-line node/no-unsupported-features/es-syntax
}
}
// We still support require() since it's more easily monkey-patched.
return require(ref);
};
try {
for await (const ref of (options.require || [])) {
await load(ref);
}
// Install dependency tracker after the require configuration has been evaluated
// to make sure we also track dependencies with custom require hooks
dependencyTracking.install(require.extensions, testPath);
if (options.debug && options.debug.port !== undefined && options.debug.host !== undefined) {
// If an inspector was active when the main process started, and is
// already active for the worker process, do not open a new one.
const {default: inspector} = await import('node:inspector'); // eslint-disable-line node/no-unsupported-features/es-syntax
if (!options.debug.active || inspector.url() === undefined) {
inspector.open(options.debug.port, options.debug.host, true);
}
if (options.debug.break) {
debugger; // eslint-disable-line no-debugger
}
}
await load(testPath);
if (flags.loadedMain) {
// Unreference the channel if the test file required AVA. This stops it
// from keeping the event loop busy, which means the `beforeExit` event can be
// used to detect when tests stall.
channel.unref();
} else {
channel.send({type: 'missing-ava-import'});
exit(1);
}
} catch (error) {
channel.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
}
};
const onError = error => {
// There shouldn't be any errors, but if there are we may not have managed
// to bootstrap enough code to serialize them. Re-throw and let the process
// crash.
setImmediate(() => {
throw error;
});
};
if (isRunningInThread) {
channel.send({type: 'starting'}); // AVA won't terminate the worker thread until it's seen this message.
const {options} = workerData;
delete workerData.options; // Don't allow user code access.
run(options).catch(onError);
} else if (isRunningInChildProcess) {
channel.send({type: 'ready-for-options'});
channel.options.then(run).catch(onError);
}

290
node_modules/ava/lib/worker/channel.cjs generated vendored Normal file
View file

@ -0,0 +1,290 @@
'use strict';
const events = require('events');
const process = require('process');
const {MessageChannel, threadId} = require('worker_threads');
const timers = require('../now-and-timers.cjs');
const {isRunningInChildProcess, isRunningInThread} = require('./utils.cjs');
let pEvent = async (emitter, event, options) => {
// We need to import p-event, but import() is asynchronous. Buffer any events
// emitted in the meantime. Don't handle errors.
const buffer = [];
const addToBuffer = (...args) => buffer.push(args);
emitter.on(event, addToBuffer);
try {
({pEvent} = await import('p-event')); // eslint-disable-line node/no-unsupported-features/es-syntax
} finally {
emitter.off(event, addToBuffer);
}
if (buffer.length === 0) {
return pEvent(emitter, event, options);
}
// Now replay buffered events.
const replayEmitter = new events.EventEmitter();
const promise = pEvent(replayEmitter, event, options);
for (const args of buffer) {
replayEmitter.emit(event, ...args);
}
const replay = (...args) => replayEmitter.emit(event, ...args);
emitter.on(event, replay);
try {
return await promise;
} finally {
emitter.off(event, replay);
}
};
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
class RefCounter {
constructor() {
this.count = 0;
}
refAndTest() {
return ++this.count === 1;
}
testAndUnref() {
return this.count > 0 && --this.count === 0;
}
}
class MessagePortHandle {
constructor(port) {
this.counter = new RefCounter();
this.unreferenceable = false;
this.channel = port;
// Referencing the port does not immediately prevent the thread from
// exiting. Use a timer to keep a reference for at least a second.
this.workaroundTimer = timers.setTimeout(() => {}, 1000).unref();
}
forceUnref() {
if (this.unreferenceable) {
return;
}
this.unreferenceable = true;
this.workaroundTimer.unref();
this.channel.unref();
}
ref() {
if (!this.unreferenceable && this.counter.refAndTest()) {
this.workaroundTimer.refresh().ref();
this.channel.ref();
}
}
unref() {
if (!this.unreferenceable && this.counter.testAndUnref()) {
this.workaroundTimer.unref();
this.channel.unref();
}
}
send(evt, transferList) {
this.channel.postMessage({ava: evt}, transferList);
}
}
class IpcHandle {
constructor(bufferedSend) {
this.counter = new RefCounter();
this.channel = process;
this.sendRaw = bufferedSend;
}
ref() {
if (this.counter.refAndTest()) {
process.channel.ref();
}
}
unref() {
if (this.counter.testAndUnref()) {
process.channel.unref();
}
}
send(evt) {
this.sendRaw({ava: evt});
}
}
let handle;
if (isRunningInChildProcess) {
const {controlFlow} = require('../ipc-flow-control.cjs');
handle = new IpcHandle(controlFlow(process));
} else if (isRunningInThread) {
const {parentPort} = require('worker_threads');
handle = new MessagePortHandle(parentPort);
}
// The attaching of message listeners will cause the port to be referenced by
// Node.js. In order to keep track, explicitly reference before attaching.
handle.ref();
exports.options = pEvent(handle.channel, 'message', selectAvaMessage('options')).then(message => message.ava.options);
exports.peerFailed = pEvent(handle.channel, 'message', selectAvaMessage('peer-failed'));
exports.send = handle.send.bind(handle);
exports.unref = handle.unref.bind(handle);
let pendingPings = Promise.resolve();
async function flush() {
handle.ref();
const promise = pendingPings.then(async () => {
handle.send({type: 'ping'});
await pEvent(handle.channel, 'message', selectAvaMessage('pong'));
if (promise === pendingPings) {
handle.unref();
}
});
pendingPings = promise;
await promise;
}
exports.flush = flush;
let channelCounter = 0;
let messageCounter = 0;
const channelEmitters = new Map();
function createChannelEmitter(channelId) {
if (channelEmitters.size === 0) {
handle.channel.on('message', message => {
if (!message.ava) {
return;
}
const {channelId, type, ...payload} = message.ava;
if (type === 'shared-worker-error') {
const emitter = channelEmitters.get(channelId);
if (emitter !== undefined) {
emitter.emit(type, payload);
}
}
});
}
const emitter = new events.EventEmitter();
channelEmitters.set(channelId, emitter);
return [emitter, () => channelEmitters.delete(channelId)];
}
function registerSharedWorker(filename, initialData) {
const channelId = `${threadId}/channel/${++channelCounter}`;
const {port1: ourPort, port2: theirPort} = new MessageChannel();
const sharedWorkerHandle = new MessagePortHandle(ourPort);
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
handle.send({
type: 'shared-worker-connect',
channelId,
filename,
initialData,
port: theirPort,
}, [theirPort]);
let currentlyAvailable = false;
let error = null;
// The attaching of message listeners will cause the port to be referenced by
// Node.js. In order to keep track, explicitly reference before attaching.
sharedWorkerHandle.ref();
const ready = pEvent(ourPort, 'message', ({type}) => type === 'ready').then(() => {
currentlyAvailable = error === null;
}).finally(() => {
// Once ready, it's up to user code to subscribe to messages, which (see
// below) causes us to reference the port.
sharedWorkerHandle.unref();
});
const messageEmitters = new Set();
// Errors are received over the test worker channel, not the message port
// dedicated to the shared worker.
pEvent(channelEmitter, 'shared-worker-error').then(() => {
unsubscribe();
sharedWorkerHandle.forceUnref();
error = new Error('The shared worker is no longer available');
currentlyAvailable = false;
for (const emitter of messageEmitters) {
emitter.emit('error', error);
}
});
ourPort.on('message', message => {
if (message.type === 'message') {
// Wait for a turn of the event loop, to allow new subscriptions to be set
// up in response to the previous message.
setImmediate(() => {
for (const emitter of messageEmitters) {
emitter.emit('message', message);
}
});
}
});
return {
forceUnref: () => sharedWorkerHandle.forceUnref(),
ready,
channel: {
available: ready,
get currentlyAvailable() {
return currentlyAvailable;
},
async * receive() {
if (error !== null) {
throw error;
}
const emitter = new events.EventEmitter();
messageEmitters.add(emitter);
try {
sharedWorkerHandle.ref();
for await (const [message] of events.on(emitter, 'message')) {
yield message;
}
} finally {
sharedWorkerHandle.unref();
messageEmitters.delete(emitter);
}
},
post(data, replyTo) {
if (error !== null) {
throw error;
}
if (!currentlyAvailable) {
throw new Error('Shared worker is not yet available');
}
const messageId = `${channelId}/message/${++messageCounter}`;
ourPort.postMessage({
type: 'message',
messageId,
replyTo,
data,
});
return messageId;
},
},
};
}
exports.registerSharedWorker = registerSharedWorker;

View file

@ -1,20 +1,19 @@
/* eslint-disable node/no-deprecated-api */
'use strict';
const ipc = require('./ipc');
import process from 'node:process';
import channel from './channel.cjs';
const seenDependencies = new Set();
let newDependencies = [];
function flush() {
if (newDependencies.length === 0) {
return;
}
ipc.send({type: 'dependencies', dependencies: newDependencies});
channel.send({type: 'dependencies', dependencies: newDependencies});
newDependencies = [];
}
exports.flush = flush;
function track(filename) {
if (seenDependencies.has(filename)) {
return;
@ -28,20 +27,22 @@ function track(filename) {
newDependencies.push(filename);
}
exports.track = track;
const tracker = {
flush,
track,
install(extensions, testPath) {
for (const ext of Object.keys(extensions)) {
const wrappedHandler = extensions[ext];
function install(testPath) {
for (const ext of Object.keys(require.extensions)) {
const wrappedHandler = require.extensions[ext];
extensions[ext] = (module, filename) => {
if (filename !== testPath) {
track(filename);
}
require.extensions[ext] = (module, filename) => {
if (filename !== testPath) {
track(filename);
}
wrappedHandler(module, filename);
};
}
},
};
wrappedHandler(module, filename);
};
}
}
exports.install = install;
export default tracker;

View file

@ -1,15 +1,16 @@
'use strict';
const path = require('path');
const chalk = require('chalk'); // Use default Chalk instance.
const process = require('process');
const {isRunningInThread, isRunningInChildProcess} = require('./utils.cjs');
// Check if the test is being run without AVA cli
const isForked = typeof process.send === 'function';
if (!isForked) {
if (!isRunningInChildProcess && !isRunningInThread) {
if (process.argv[1]) {
const fp = path.relative('.', process.argv[1]);
console.log();
console.error(`Test files must be run with the AVA CLI:\n\n ${chalk.grey.dim('$')} ${chalk.cyan('ava ' + fp)}\n`);
console.error(`Test files must be run with the AVA CLI:\n\n $ ava ${fp}\n`);
process.exit(1); // eslint-disable-line unicorn/no-process-exit
} else {

201
node_modules/ava/lib/worker/ipc.js generated vendored
View file

@ -1,201 +0,0 @@
'use strict';
const events = require('events');
const pEvent = require('p-event');
const {controlFlow} = require('../ipc-flow-control');
const {get: getOptions} = require('./options');
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
exports.options = pEvent(process, 'message', selectAvaMessage('options')).then(message => message.ava.options);
exports.peerFailed = pEvent(process, 'message', selectAvaMessage('peer-failed'));
const bufferedSend = controlFlow(process);
function send(evt) {
bufferedSend({ava: evt});
}
exports.send = send;
let refs = 1;
function ref() {
if (++refs === 1) {
process.channel.ref();
}
}
function unref() {
if (refs > 0 && --refs === 0) {
process.channel.unref();
}
}
exports.unref = unref;
let pendingPings = Promise.resolve();
async function flush() {
ref();
const promise = pendingPings.then(async () => { // eslint-disable-line promise/prefer-await-to-then
send({type: 'ping'});
await pEvent(process, 'message', selectAvaMessage('pong'));
if (promise === pendingPings) {
unref();
}
});
pendingPings = promise;
await promise;
}
exports.flush = flush;
let channelCounter = 0;
let messageCounter = 0;
const channelEmitters = new Map();
function createChannelEmitter(channelId) {
if (channelEmitters.size === 0) {
process.on('message', message => {
if (!message.ava) {
return;
}
const {channelId, type, ...payload} = message.ava;
if (
type === 'shared-worker-error' ||
type === 'shared-worker-message' ||
type === 'shared-worker-ready'
) {
const emitter = channelEmitters.get(channelId);
if (emitter !== undefined) {
emitter.emit(type, payload);
}
}
});
}
const emitter = new events.EventEmitter();
channelEmitters.set(channelId, emitter);
return [emitter, () => channelEmitters.delete(channelId)];
}
function registerSharedWorker(filename, initialData) {
const channelId = `${getOptions().forkId}/channel/${++channelCounter}`;
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
let forcedUnref = false;
let refs = 0;
const forceUnref = () => {
if (forcedUnref) {
return;
}
forcedUnref = true;
if (refs > 0) {
unref();
}
};
const refChannel = () => {
if (!forcedUnref && ++refs === 1) {
ref();
}
};
const unrefChannel = () => {
if (!forcedUnref && refs > 0 && --refs === 0) {
unref();
}
};
send({
type: 'shared-worker-connect',
channelId,
filename,
initialData
});
let currentlyAvailable = false;
let error = null;
refChannel();
const ready = pEvent(channelEmitter, 'shared-worker-ready').then(() => { // eslint-disable-line promise/prefer-await-to-then
currentlyAvailable = error === null;
}).finally(unrefChannel);
const messageEmitters = new Set();
const handleMessage = message => {
// Wait for a turn of the event loop, to allow new subscriptions to be set
// up in response to the previous message.
setImmediate(() => {
for (const emitter of messageEmitters) {
emitter.emit('message', message);
}
});
};
channelEmitter.on('shared-worker-message', handleMessage);
pEvent(channelEmitter, 'shared-worker-error').then(() => { // eslint-disable-line promise/prefer-await-to-then
unsubscribe();
forceUnref();
error = new Error('The shared worker is no longer available');
currentlyAvailable = false;
for (const emitter of messageEmitters) {
emitter.emit('error', error);
}
});
return {
forceUnref,
ready,
channel: {
available: ready,
get currentlyAvailable() {
return currentlyAvailable;
},
async * receive() {
if (error !== null) {
throw error;
}
const emitter = new events.EventEmitter();
messageEmitters.add(emitter);
try {
refChannel();
for await (const [message] of events.on(emitter, 'message')) {
yield message;
}
} finally {
unrefChannel();
messageEmitters.delete(emitter);
}
},
post(serializedData, replyTo) {
if (error !== null) {
throw error;
}
if (!currentlyAvailable) {
throw new Error('Shared worker is not yet available');
}
const messageId = `${channelId}/message/${++messageCounter}`;
send({
type: 'shared-worker-message',
channelId,
messageId,
replyTo,
serializedData
});
return messageId;
}
}
};
}
exports.registerSharedWorker = registerSharedWorker;

View file

@ -1,18 +1,27 @@
import * as fs from 'node:fs';
import {createRequire, findSourceMap} from 'node:module';
import {pathToFileURL} from 'node:url';
import callsites from 'callsites';
const require = createRequire(import.meta.url);
function parse(file) {
const fs = require('fs');
// Avoid loading these until we actually need to select tests by line number.
const acorn = require('acorn');
const walk = require('acorn-walk');
const ast = acorn.parse(fs.readFileSync(file, 'utf8'), {
ecmaVersion: 11,
locations: true
locations: true,
sourceType: 'module',
});
const locations = [];
walk.simple(ast, {
CallExpression(node) {
locations.push(node.loc);
}
},
});
// Walking is depth-first, but we want to sort these breadth-first.
@ -49,36 +58,65 @@ function findTest(locations, declaration) {
return spans.pop();
}
const range = (start, end) => new Array(end - start + 1).fill(start).map((element, index) => element + index);
const range = (start, end) => Array.from({length: end - start + 1}).fill(start).map((element, index) => element + index);
module.exports = ({file, lineNumbers = []}) => {
const translate = (sourceMap, pos) => {
if (sourceMap === undefined) {
return pos;
}
const entry = sourceMap.findEntry(pos.line - 1, pos.column); // Source maps are 0-based
return {
line: entry.originalLine + 1, // Readjust for Acorn.
column: entry.originalColumn,
};
};
export default function lineNumberSelection({file, lineNumbers = []}) {
if (lineNumbers.length === 0) {
return undefined;
}
// Avoid loading these until we actually need to select tests by line number.
const callsites = require('callsites');
const sourceMapSupport = require('source-map-support');
const locations = parse(file);
const selected = new Set(lineNumbers);
let locations = parse(file);
let lookedForSourceMap = false;
let sourceMap;
return () => {
if (!lookedForSourceMap) {
lookedForSourceMap = true;
// The returned function is called *after* the file has been loaded.
// Source maps are not available before then.
sourceMap = findSourceMap(file);
if (sourceMap !== undefined) {
locations = locations.map(({start, end}) => ({
start: translate(sourceMap, start),
end: translate(sourceMap, end),
}));
}
}
// Assume this is called from a test declaration, which is located in the file.
// If not… don't select the test!
const callSite = callsites().find(callSite => callSite.getFileName() === file);
const callSite = callsites().find(callSite => {
const current = callSite.getFileName();
if (file.startsWith('file://')) {
return current.startsWith('file://') ? file === current : file === pathToFileURL(current).toString();
}
return current.startsWith('file://') ? pathToFileURL(file).toString() === current : file === current;
});
if (!callSite) {
return false;
}
// FIXME: This assumes the callSite hasn't already been adjusted. It's likely
// that if `source-map-support/register` has been loaded, this would result
// in the wrong location.
const sourceCallSite = sourceMapSupport.wrapCallSite(callSite);
const start = {
line: sourceCallSite.getLineNumber(),
column: sourceCallSite.getColumnNumber() - 1 // Use 0-indexed columns.
};
const start = translate(sourceMap, {
line: callSite.getLineNumber(), // 1-based
column: callSite.getColumnNumber() - 1, // Comes out as 1-based, Acorn wants 0-based
});
const test = findTest(locations, start);
if (!test) {
@ -87,4 +125,4 @@ module.exports = ({file, lineNumbers = []}) => {
return range(test.start.line, test.end.line).some(line => selected.has(line));
};
};
}

12
node_modules/ava/lib/worker/main.cjs generated vendored Normal file
View file

@ -0,0 +1,12 @@
'use strict';
require('./guard-environment.cjs'); // eslint-disable-line import/no-unassigned-import
const assert = require('assert');
const {flags, refs} = require('./state.cjs');
assert(refs.runnerChain);
flags.loadedMain = true;
module.exports = refs.runnerChain;

21
node_modules/ava/lib/worker/main.js generated vendored
View file

@ -1,21 +0,0 @@
'use strict';
const runner = require('./subprocess').getRunner();
const makeCjsExport = () => {
function test(...args) {
return runner.chain(...args);
}
return Object.assign(test, runner.chain);
};
// Support CommonJS modules by exporting a test function that can be fully
// chained. Also support ES module loaders by exporting __esModule and a
// default. Support `import * as ava from 'ava'` use cases by exporting a
// `test` member. Do all this whilst preventing `test.test.test() or
// `test.default.test()` chains, though in CommonJS `test.test()` is
// unavoidable.
module.exports = Object.assign(makeCjsExport(), {
__esModule: true,
default: runner.chain
});

View file

@ -1,18 +1,29 @@
const v8 = require('v8');
const pkg = require('../../package.json');
const subprocess = require('./subprocess');
const options = require('./options');
const {registerSharedWorker: register} = require('./channel.cjs');
const options = require('./options.cjs');
const {sharedWorkerTeardowns, waitForReady} = require('./state.cjs');
require('./guard-environment.cjs'); // eslint-disable-line import/no-unassigned-import
const workers = new Map();
const workerTeardownFns = new WeakMap();
function createSharedWorker(filename, initialData, teardown) {
const channel = subprocess.registerSharedWorker(filename, initialData, teardown);
const {channel, forceUnref, ready} = register(filename, initialData, teardown);
waitForReady.push(ready);
sharedWorkerTeardowns.push(async () => {
try {
await teardown();
} finally {
forceUnref();
}
});
class ReceivedMessage {
constructor(id, serializedData) {
constructor(id, data) {
this.id = id;
this.data = v8.deserialize(new Uint8Array(serializedData));
this.data = data;
}
reply(data) {
@ -35,7 +46,7 @@ function createSharedWorker(filename, initialData, teardown) {
let message = messageCache.get(evt);
if (message === undefined) {
message = new ReceivedMessage(evt.messageId, evt.serializedData);
message = new ReceivedMessage(evt.messageId, evt.data);
messageCache.set(evt, message);
}
@ -44,19 +55,19 @@ function createSharedWorker(filename, initialData, teardown) {
}
function publishMessage(data, replyTo) {
const id = channel.post([...v8.serialize(data)], replyTo);
const id = channel.post(data, replyTo);
return {
id,
async * replies() {
yield * receiveMessages(id);
}
},
};
}
return {
available: channel.available,
protocol: 'experimental',
protocol: 'ava-4',
get currentlyAvailable() {
return channel.currentlyAvailable;
@ -68,29 +79,27 @@ function createSharedWorker(filename, initialData, teardown) {
async * subscribe() {
yield * receiveMessages();
}
},
};
}
const supportsSharedWorkers = process.versions.node >= '12.17.0';
function registerSharedWorker({
filename,
initialData,
supportedProtocols,
teardown
teardown,
}) {
if (!options.get().experiments.sharedWorkers) {
throw new Error('Shared workers are experimental. Opt in to them in your AVA configuration');
const options_ = options.get();
if (!options_.workerThreads) {
throw new Error('Shared workers can be used only when worker threads are enabled');
}
if (!supportsSharedWorkers) {
throw new Error('Shared workers require Node.js 12.17 or newer');
if (!supportedProtocols.includes('ava-4')) {
throw new Error(`This version of AVA (${pkg.version}) does not support any of the desired shared worker protocols: ${supportedProtocols.join(',')}`);
}
if (!supportedProtocols.includes('experimental')) {
throw new Error(`This version of AVA (${pkg.version}) does not support any of the desired shared worker protocols: ${supportedProtocols.join()}`);
}
filename = String(filename); // Allow URL instances.
let worker = workers.get(filename);
if (worker === undefined) {

5
node_modules/ava/lib/worker/state.cjs generated vendored Normal file
View file

@ -0,0 +1,5 @@
'use strict';
exports.flags = {loadedMain: false};
exports.refs = {runnerChain: null};
exports.sharedWorkerTeardowns = [];
exports.waitForReady = [];

View file

@ -1,266 +0,0 @@
'use strict';
const {pathToFileURL} = require('url');
const currentlyUnhandled = require('currently-unhandled')();
require('./ensure-forked'); // eslint-disable-line import/no-unassigned-import
const ipc = require('./ipc');
const supportsESM = async () => {
try {
await import('data:text/javascript,'); // eslint-disable-line node/no-unsupported-features/es-syntax
return true;
} catch {}
return false;
};
ipc.send({type: 'ready-for-options'});
ipc.options.then(async options => {
require('./options').set(options);
require('../chalk').set(options.chalkOptions);
if (options.chalkOptions.level > 0) {
const {stdout, stderr} = process;
global.console = Object.assign(global.console, new console.Console({stdout, stderr, colorMode: true}));
}
const nowAndTimers = require('../now-and-timers');
const providerManager = require('../provider-manager');
const Runner = require('../runner');
const serializeError = require('../serialize-error');
const dependencyTracking = require('./dependency-tracker');
const lineNumberSelection = require('./line-numbers');
const sharedWorkerTeardowns = [];
async function exit(code) {
if (!process.exitCode) {
process.exitCode = code;
}
dependencyTracking.flush();
await ipc.flush();
process.exit(); // eslint-disable-line unicorn/no-process-exit
}
// TODO: Initialize providers here, then pass to lineNumberSelection() so they
// can be used to parse the test file.
let checkSelectedByLineNumbers;
try {
checkSelectedByLineNumbers = lineNumberSelection({
file: options.file,
lineNumbers: options.lineNumbers
});
} catch (error) {
ipc.send({type: 'line-number-selection-error', err: serializeError('Line number selection error', false, error, options.file)});
checkSelectedByLineNumbers = () => false;
}
const runner = new Runner({
checkSelectedByLineNumbers,
experiments: options.experiments,
failFast: options.failFast,
failWithoutAssertions: options.failWithoutAssertions,
file: options.file,
match: options.match,
projectDir: options.projectDir,
recordNewSnapshots: options.recordNewSnapshots,
runOnlyExclusive: options.runOnlyExclusive,
serial: options.serial,
snapshotDir: options.snapshotDir,
updateSnapshots: options.updateSnapshots
});
ipc.peerFailed.then(() => { // eslint-disable-line promise/prefer-await-to-then
runner.interrupt();
});
const attributedRejections = new Set();
process.on('unhandledRejection', (reason, promise) => {
if (runner.attributeLeakedError(reason)) {
attributedRejections.add(promise);
}
});
runner.on('dependency', dependencyTracking.track);
runner.on('stateChange', state => ipc.send(state));
runner.on('error', error => {
ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
});
runner.on('finish', async () => {
try {
const {cannotSave, touchedFiles} = runner.saveSnapshotState();
if (cannotSave) {
ipc.send({type: 'snapshot-error'});
} else if (touchedFiles) {
ipc.send({type: 'touched-files', files: touchedFiles});
}
} catch (error) {
ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, error, runner.file)});
exit(1);
return;
}
try {
await Promise.all(sharedWorkerTeardowns.map(fn => fn()));
} catch (error) {
ipc.send({type: 'uncaught-exception', err: serializeError('Shared worker teardown error', false, error, runner.file)});
exit(1);
return;
}
nowAndTimers.setImmediate(() => {
currentlyUnhandled()
.filter(rejection => !attributedRejections.has(rejection.promise))
.forEach(rejection => {
ipc.send({type: 'unhandled-rejection', err: serializeError('Unhandled rejection', true, rejection.reason, runner.file)});
});
exit(0);
});
});
process.on('uncaughtException', error => {
if (runner.attributeLeakedError(error)) {
return;
}
ipc.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
});
let accessedRunner = false;
exports.getRunner = () => {
accessedRunner = true;
return runner;
};
exports.registerSharedWorker = (filename, initialData, teardown) => {
const {channel, forceUnref, ready} = ipc.registerSharedWorker(filename, initialData);
runner.waitForReady.push(ready);
sharedWorkerTeardowns.push(async () => {
try {
await teardown();
} finally {
forceUnref();
}
});
return channel;
};
// Store value to prevent required modules from modifying it.
const testPath = options.file;
// Install basic source map support.
const sourceMapSupport = require('source-map-support');
sourceMapSupport.install({
environment: 'node',
handleUncaughtExceptions: false
});
const extensionsToLoadAsModules = Object.entries(options.moduleTypes)
.filter(([, type]) => type === 'module')
.map(([extension]) => extension);
// Install before processing options.require, so if helpers are added to the
// require configuration the *compiled* helper will be loaded.
const {projectDir, providerStates = []} = options;
const providers = providerStates.map(({type, state}) => {
if (type === 'babel') {
const provider = providerManager.babel(projectDir).worker({extensionsToLoadAsModules, state});
runner.powerAssert = provider.powerAssert;
return provider;
}
if (type === 'typescript') {
return providerManager.typescript(projectDir).worker({extensionsToLoadAsModules, state});
}
return null;
}).filter(provider => provider !== null);
let requireFn = require;
let isESMSupported;
const load = async ref => {
for (const extension of extensionsToLoadAsModules) {
if (ref.endsWith(`.${extension}`)) {
if (typeof isESMSupported !== 'boolean') {
// Lazily determine support since this prints an experimental warning.
// eslint-disable-next-line no-await-in-loop
isESMSupported = await supportsESM();
}
if (isESMSupported) {
return import(pathToFileURL(ref)); // eslint-disable-line node/no-unsupported-features/es-syntax
}
ipc.send({type: 'internal-error', err: serializeError('Internal runner error', false, new Error('ECMAScript Modules are not supported in this Node.js version.'))});
exit(1);
return;
}
}
for (const provider of providers) {
if (provider.canLoad(ref)) {
return provider.load(ref, {requireFn});
}
}
return requireFn(ref);
};
try {
for await (const ref of (options.require || [])) {
const mod = await load(ref);
try {
if (Reflect.has(mod, Symbol.for('esm:package'))) {
requireFn = mod(module);
}
} catch {}
}
// Install dependency tracker after the require configuration has been evaluated
// to make sure we also track dependencies with custom require hooks
dependencyTracking.install(testPath);
if (options.debug && options.debug.port !== undefined && options.debug.host !== undefined) {
// If an inspector was active when the main process started, and is
// already active for the worker process, do not open a new one.
const inspector = require('inspector'); // eslint-disable-line node/no-unsupported-features/node-builtins
if (!options.debug.active || inspector.url() === undefined) {
inspector.open(options.debug.port, options.debug.host, true);
}
if (options.debug.break) {
debugger; // eslint-disable-line no-debugger
}
}
await load(testPath);
if (accessedRunner) {
// Unreference the IPC channel if the test file required AVA. This stops it
// from keeping the event loop busy, which means the `beforeExit` event can be
// used to detect when tests stall.
ipc.unref();
} else {
ipc.send({type: 'missing-ava-import'});
exit(1);
}
} catch (error) {
ipc.send({type: 'uncaught-exception', err: serializeError('Uncaught exception', true, error, runner.file)});
exit(1);
}
}).catch(error => {
// There shouldn't be any errors, but if there are we may not have managed
// to bootstrap enough code to serialize them. Re-throw and let the process
// crash.
setImmediate(() => {
throw error;
});
});

6
node_modules/ava/lib/worker/utils.cjs generated vendored Normal file
View file

@ -0,0 +1,6 @@
'use strict';
const process = require('process');
const {isMainThread} = require('worker_threads');
exports.isRunningInThread = isMainThread === false;
exports.isRunningInChildProcess = typeof process.send === 'function';

View file

@ -1,3 +1,27 @@
## 8.7.0 (2021-12-27)
### New features
Support quoted export names.
Upgrade to Unicode 14.
Add support for Unicode 13 properties in regular expressions.
### Bug fixes
Use a loop to find line breaks, because the existing regexp search would overrun the end of the searched range and waste a lot of time in minified code.
## 8.6.0 (2021-11-18)
### Bug fixes
Fix a bug where an object literal with multiple `__proto__` properties would incorrectly be accepted if a later property value held an assigment.
### New features
Support class private fields with the `in` operator.
## 8.5.0 (2021-09-06)
### Bug fixes
@ -36,7 +60,7 @@ A new option, `allowSuperOutsideMethod`, can be used to suppress the error when
Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 an higher.
Add support for the `p` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
Add support for the `d` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
## 8.2.4 (2021-05-04)

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -4,13 +4,33 @@ var path = require('path');
var fs = require('fs');
var acorn = require('./acorn.js');
function _interopNamespace(e) {
if (e && e.__esModule) return e;
var n = Object.create(null);
if (e) {
Object.keys(e).forEach(function (k) {
if (k !== 'default') {
var d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: function () { return e[k]; }
});
}
});
}
n["default"] = e;
return Object.freeze(n);
}
var acorn__namespace = /*#__PURE__*/_interopNamespace(acorn);
var inputFilePaths = [], forceFileName = false, fileMode = false, silent = false, compact = false, tokenize = false;
var options = {};
function help(status) {
var print = (status === 0) ? console.log : console.error;
print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
print(" [--tokenize] [--locations] [---allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
print(" [--tokenize] [--locations] [--allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
process.exit(status);
}
@ -44,14 +64,14 @@ function run(codeList) {
codeList.forEach(function (code, idx) {
fileIdx = idx;
if (!tokenize) {
result = acorn.parse(code, options);
result = acorn__namespace.parse(code, options);
options.program = result;
} else {
var tokenizer = acorn.tokenizer(code, options), token;
var tokenizer = acorn__namespace.tokenizer(code, options), token;
do {
token = tokenizer.getToken();
result.push(token);
} while (token.type !== acorn.tokTypes.eof)
} while (token.type !== acorn__namespace.tokTypes.eof)
}
});
} catch (e) {

View file

@ -16,7 +16,7 @@
],
"./package.json": "./package.json"
},
"version": "8.5.0",
"version": "8.7.0",
"engines": {
"node": ">=0.4.0"
},
@ -42,7 +42,7 @@
},
"license": "MIT",
"scripts": {
"prepare": "cd ..; npm run build:main && npm run build:bin"
"prepare": "cd ..; npm run build:main"
},
"bin": {
"acorn": "./bin/acorn"

View file

@ -0,0 +1,47 @@
/**
Create an error from multiple errors.
*/
export default class AggregateError<T extends Error = Error> extends Error {
readonly name: 'AggregateError';
readonly errors: readonly [T];
/**
@param errors - If a string, a new `Error` is created with the string as the error message. If a non-Error object, a new `Error` is created with all properties from the object copied over.
@example
```
import AggregateError from 'aggregate-error';
const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]);
throw error;
// AggregateError:
// Error: foo
// at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:33)
// Error: bar
// at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
// Error: baz
// at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
// at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3)
// at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
// at Module._compile (module.js:556:32)
// at Object.Module._extensions..js (module.js:565:10)
// at Module.load (module.js:473:32)
// at tryModuleLoad (module.js:432:12)
// at Function.Module._load (module.js:424:3)
// at Module.runMain (module.js:590:10)
// at run (bootstrap_node.js:394:7)
// at startup (bootstrap_node.js:149:9)
for (const individualError of error.errors) {
console.log(individualError);
}
//=> [Error: foo]
//=> [Error: bar]
//=> [Error: baz]
```
*/
constructor(errors: ReadonlyArray<T | Record<string, any> | string>);
}

44
node_modules/ava/node_modules/aggregate-error/index.js generated vendored Normal file
View file

@ -0,0 +1,44 @@
import indentString from 'indent-string';
import cleanStack from 'clean-stack';
const cleanInternalStack = stack => stack.replace(/\s+at .*aggregate-error\/index.js:\d+:\d+\)?/g, '');
export default class AggregateError extends Error {
#errors;
name = 'AggregateError';
constructor(errors) {
if (!Array.isArray(errors)) {
throw new TypeError(`Expected input to be an Array, got ${typeof errors}`);
}
errors = errors.map(error => {
if (error instanceof Error) {
return error;
}
if (error !== null && typeof error === 'object') {
// Handle plain error objects with message property and/or possibly other metadata
return Object.assign(new Error(error.message), error);
}
return new Error(error);
});
let message = errors
.map(error => {
// The `stack` property is not standardized, so we can't assume it exists
return typeof error.stack === 'string' ? cleanInternalStack(cleanStack(error.stack)) : String(error);
})
.join('\n');
message = '\n' + indentString(message, 4);
super(message);
this.#errors = errors;
}
get errors() {
return this.#errors.slice();
}
}

View file

@ -1,6 +1,6 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

View file

@ -0,0 +1,45 @@
{
"name": "aggregate-error",
"version": "4.0.0",
"description": "Create an error from multiple errors",
"license": "MIT",
"repository": "sindresorhus/aggregate-error",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
},
"scripts": {
"//test": "xo && ava && tsd",
"test": "ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"aggregate",
"error",
"combine",
"multiple",
"many",
"collection",
"iterable",
"iterator"
],
"dependencies": {
"clean-stack": "^4.0.0",
"indent-string": "^5.0.0"
},
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.14.0",
"xo": "^0.38.2"
}
}

View file

@ -0,0 +1,60 @@
# aggregate-error
> Create an error from multiple errors
*Note: With [Node.js 15](https://medium.com/@nodejs/node-js-v15-0-0-is-here-deb00750f278), there's now a built-in [`AggregateError`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/AggregateError) type.*
## Install
```
$ npm install aggregate-error
```
## Usage
```js
import AggregateError from 'aggregate-error';
const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]);
throw error;
/*
AggregateError:
Error: foo
at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:33)
Error: bar
at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
Error: baz
at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3)
at Object.<anonymous> (/Users/sindresorhus/dev/aggregate-error/example.js:3:13)
at Module._compile (module.js:556:32)
at Object.Module._extensions..js (module.js:565:10)
at Module.load (module.js:473:32)
at tryModuleLoad (module.js:432:12)
at Function.Module._load (module.js:424:3)
at Module.runMain (module.js:590:10)
at run (bootstrap_node.js:394:7)
at startup (bootstrap_node.js:149:9)
*/
for (const individualError of error.errors) {
console.log(individualError);
}
//=> [Error: foo]
//=> [Error: bar]
//=> [Error: baz]
```
## API
### AggregateError(errors)
Returns an `Error`.
#### errors
Type: `Array<Error|object|string>`
If a string, a new `Error` is created with the string as the error message.\
If a non-Error object, a new `Error` is created with all properties from the object copied over.

33
node_modules/ava/node_modules/ansi-regex/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,33 @@
export interface Options {
/**
Match only the first ANSI escape.
@default false
*/
readonly onlyFirst: boolean;
}
/**
Regular expression for matching ANSI escape codes.
@example
```
import ansiRegex from 'ansi-regex';
ansiRegex().test('\u001B[4mcake\u001B[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001B[4mcake\u001B[0m'.match(ansiRegex());
//=> ['\u001B[4m', '\u001B[0m']
'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true}));
//=> ['\u001B[4m']
'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex());
//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007']
```
*/
export default function ansiRegex(options?: Options): RegExp;

8
node_modules/ava/node_modules/ansi-regex/index.js generated vendored Normal file
View file

@ -0,0 +1,8 @@
export default function ansiRegex({onlyFirst = false} = {}) {
const pattern = [
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
].join('|');
return new RegExp(pattern, onlyFirst ? undefined : 'g');
}

View file

@ -1,6 +1,6 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

58
node_modules/ava/node_modules/ansi-regex/package.json generated vendored Normal file
View file

@ -0,0 +1,58 @@
{
"name": "ansi-regex",
"version": "6.0.1",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": "chalk/ansi-regex",
"funding": "https://github.com/chalk/ansi-regex?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd",
"view-supported": "node fixtures/view-codes.js"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.14.0",
"xo": "^0.38.2"
}
}

72
node_modules/ava/node_modules/ansi-regex/readme.md generated vendored Normal file
View file

@ -0,0 +1,72 @@
# ansi-regex
> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```
$ npm install ansi-regex
```
## Usage
```js
import ansiRegex from 'ansi-regex';
ansiRegex().test('\u001B[4mcake\u001B[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001B[4mcake\u001B[0m'.match(ansiRegex());
//=> ['\u001B[4m', '\u001B[0m']
'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true}));
//=> ['\u001B[4m']
'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex());
//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007']
```
## API
### ansiRegex(options?)
Returns a regex for matching ANSI escape codes.
#### options
Type: `object`
##### onlyFirst
Type: `boolean`\
Default: `false` *(Matches any ANSI escape codes in a string)*
Match only the first ANSI escape.
## FAQ
### Why do you test for codes not in the ECMA 48 standard?
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Josh Junon](https://github.com/qix-)
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-ansi-regex?utm_source=npm-ansi-regex&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

View file

@ -1,167 +1,190 @@
declare namespace ansiStyles {
interface CSPair {
/**
The ANSI terminal control sequence for starting this style.
*/
readonly open: string;
export interface CSPair {
/**
The ANSI terminal control sequence for starting this style.
*/
readonly open: string;
/**
The ANSI terminal control sequence for ending this style.
*/
readonly close: string;
}
/**
The ANSI terminal control sequence for ending this style.
*/
readonly close: string;
}
interface ColorBase {
/**
The ANSI terminal control sequence for ending this color.
*/
readonly close: string;
export interface ColorBase {
/**
The ANSI terminal control sequence for ending this color.
*/
readonly close: string;
ansi256(code: number): string;
ansi(code: number): string;
ansi16m(red: number, green: number, blue: number): string;
}
ansi256(code: number): string;
interface Modifier {
/**
Resets the current color chain.
*/
readonly reset: CSPair;
ansi16m(red: number, green: number, blue: number): string;
}
/**
Make text bold.
*/
readonly bold: CSPair;
export interface Modifier {
/**
Resets the current color chain.
*/
readonly reset: CSPair;
/**
Emitting only a small amount of light.
*/
readonly dim: CSPair;
/**
Make text bold.
*/
readonly bold: CSPair;
/**
Make text italic. (Not widely supported)
*/
readonly italic: CSPair;
/**
Emitting only a small amount of light.
*/
readonly dim: CSPair;
/**
Make text underline. (Not widely supported)
*/
readonly underline: CSPair;
/**
Make text italic. (Not widely supported)
*/
readonly italic: CSPair;
/**
Make text overline.
/**
Make text underline. (Not widely supported)
*/
readonly underline: CSPair;
Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
*/
readonly overline: CSPair;
/**
Make text overline.
/**
Inverse background and foreground colors.
*/
readonly inverse: CSPair;
Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
*/
readonly overline: CSPair;
/**
Prints the text, but makes it invisible.
*/
readonly hidden: CSPair;
/**
Inverse background and foreground colors.
*/
readonly inverse: CSPair;
/**
Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: CSPair;
}
/**
Prints the text, but makes it invisible.
*/
readonly hidden: CSPair;
interface ForegroundColor {
readonly black: CSPair;
readonly red: CSPair;
readonly green: CSPair;
readonly yellow: CSPair;
readonly blue: CSPair;
readonly cyan: CSPair;
readonly magenta: CSPair;
readonly white: CSPair;
/**
Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: CSPair;
}
/**
Alias for `blackBright`.
*/
readonly gray: CSPair;
export interface ForegroundColor {
readonly black: CSPair;
readonly red: CSPair;
readonly green: CSPair;
readonly yellow: CSPair;
readonly blue: CSPair;
readonly cyan: CSPair;
readonly magenta: CSPair;
readonly white: CSPair;
/**
Alias for `blackBright`.
*/
readonly grey: CSPair;
/**
Alias for `blackBright`.
*/
readonly gray: CSPair;
readonly blackBright: CSPair;
readonly redBright: CSPair;
readonly greenBright: CSPair;
readonly yellowBright: CSPair;
readonly blueBright: CSPair;
readonly cyanBright: CSPair;
readonly magentaBright: CSPair;
readonly whiteBright: CSPair;
}
/**
Alias for `blackBright`.
*/
readonly grey: CSPair;
interface BackgroundColor {
readonly bgBlack: CSPair;
readonly bgRed: CSPair;
readonly bgGreen: CSPair;
readonly bgYellow: CSPair;
readonly bgBlue: CSPair;
readonly bgCyan: CSPair;
readonly bgMagenta: CSPair;
readonly bgWhite: CSPair;
readonly blackBright: CSPair;
readonly redBright: CSPair;
readonly greenBright: CSPair;
readonly yellowBright: CSPair;
readonly blueBright: CSPair;
readonly cyanBright: CSPair;
readonly magentaBright: CSPair;
readonly whiteBright: CSPair;
}
/**
Alias for `bgBlackBright`.
*/
readonly bgGray: CSPair;
export interface BackgroundColor {
readonly bgBlack: CSPair;
readonly bgRed: CSPair;
readonly bgGreen: CSPair;
readonly bgYellow: CSPair;
readonly bgBlue: CSPair;
readonly bgCyan: CSPair;
readonly bgMagenta: CSPair;
readonly bgWhite: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGrey: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGray: CSPair;
readonly bgBlackBright: CSPair;
readonly bgRedBright: CSPair;
readonly bgGreenBright: CSPair;
readonly bgYellowBright: CSPair;
readonly bgBlueBright: CSPair;
readonly bgCyanBright: CSPair;
readonly bgMagentaBright: CSPair;
readonly bgWhiteBright: CSPair;
}
/**
Alias for `bgBlackBright`.
*/
readonly bgGrey: CSPair;
interface ConvertColor {
/**
Convert from the RGB color space to the ANSI 256 color space.
readonly bgBlackBright: CSPair;
readonly bgRedBright: CSPair;
readonly bgGreenBright: CSPair;
readonly bgYellowBright: CSPair;
readonly bgBlueBright: CSPair;
readonly bgCyanBright: CSPair;
readonly bgMagentaBright: CSPair;
readonly bgWhiteBright: CSPair;
}
@param red - (`0...255`)
@param green - (`0...255`)
@param blue - (`0...255`)
*/
rgbToAnsi256(red: number, green: number, blue: number): number;
export interface ConvertColor {
/**
Convert from the RGB color space to the ANSI 256 color space.
/**
Convert from the RGB HEX color space to the RGB color space.
@param red - (`0...255`)
@param green - (`0...255`)
@param blue - (`0...255`)
*/
rgbToAnsi256(red: number, green: number, blue: number): number;
@param hex - A hexadecimal string containing RGB data.
*/
hexToRgb(hex: string): [red: number, green: number, blue: number];
/**
Convert from the RGB HEX color space to the RGB color space.
/**
Convert from the RGB HEX color space to the ANSI 256 color space.
@param hex - A hexadecimal string containing RGB data.
*/
hexToRgb(hex: string): [red: number, green: number, blue: number];
@param hex - A hexadecimal string containing RGB data.
*/
hexToAnsi256(hex: string): number;
}
/**
Convert from the RGB HEX color space to the ANSI 256 color space.
@param hex - A hexadecimal string containing RGB data.
*/
hexToAnsi256(hex: string): number;
/**
Convert from the ANSI 256 color space to the ANSI 16 color space.
@param code - A number representing the ANSI 256 color.
*/
ansi256ToAnsi(code: number): number;
/**
Convert from the RGB color space to the ANSI 16 color space.
@param red - (`0...255`)
@param green - (`0...255`)
@param blue - (`0...255`)
*/
rgbToAnsi(red: number, green: number, blue: number): number;
/**
Convert from the RGB HEX color space to the ANSI 16 color space.
@param hex - A hexadecimal string containing RGB data.
*/
hexToAnsi(hex: string): number;
}
declare const ansiStyles: {
readonly modifier: ansiStyles.Modifier;
readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase;
readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase;
readonly modifier: Modifier;
readonly color: ColorBase & ForegroundColor;
readonly bgColor: ColorBase & BackgroundColor;
readonly codes: ReadonlyMap<number, number>;
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier & ansiStyles.ConvertColor;
} & ForegroundColor & BackgroundColor & Modifier & ConvertColor;
export = ansiStyles;
export default ansiStyles;

View file

@ -1,7 +1,7 @@
'use strict';
const ANSI_BACKGROUND_OFFSET = 10;
const wrapAnsi16 = (offset = 0) => code => `\u001B[${code + offset}m`;
const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`;
const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`;
@ -95,8 +95,10 @@ function assembleStyles() {
styles.color.close = '\u001B[39m';
styles.bgColor.close = '\u001B[49m';
styles.color.ansi = wrapAnsi16();
styles.color.ansi256 = wrapAnsi256();
styles.color.ansi16m = wrapAnsi16m();
styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);
styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
@ -151,14 +153,64 @@ function assembleStyles() {
hexToAnsi256: {
value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
enumerable: false
},
ansi256ToAnsi: {
value: code => {
if (code < 8) {
return 30 + code;
}
if (code < 16) {
return 90 + (code - 8);
}
let red;
let green;
let blue;
if (code >= 232) {
red = (((code - 232) * 10) + 8) / 255;
green = red;
blue = red;
} else {
code -= 16;
const remainder = code % 36;
red = Math.floor(code / 36) / 5;
green = Math.floor(remainder / 6) / 5;
blue = (remainder % 6) / 5;
}
const value = Math.max(red, green, blue) * 2;
if (value === 0) {
return 30;
}
let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));
if (value === 2) {
result += 60;
}
return result;
},
enumerable: false
},
rgbToAnsi: {
value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),
enumerable: false
},
hexToAnsi: {
value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),
enumerable: false
}
});
return styles;
}
// Make the export immutable
Object.defineProperty(module, 'exports', {
enumerable: true,
get: assembleStyles
});
const ansiStyles = assembleStyles();
export default ansiStyles;

View file

@ -1,6 +1,6 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

View file

@ -1,6 +1,6 @@
{
"name": "ansi-styles",
"version": "5.2.0",
"version": "6.1.0",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": "chalk/ansi-styles",
@ -10,8 +10,10 @@
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=10"
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd",
@ -44,9 +46,9 @@
"text"
],
"devDependencies": {
"ava": "^2.4.0",
"ava": "^3.15.0",
"svg-term-cli": "^2.1.1",
"tsd": "^0.14.0",
"xo": "^0.37.1"
"xo": "^0.38.2"
}
}

View file

@ -15,9 +15,9 @@ $ npm install ansi-styles
## Usage
```js
const style = require('ansi-styles');
import styles from 'ansi-styles';
console.log(`${style.green.open}Hello world!${style.green.close}`);
console.log(`${styles.green.open}Hello world!${styles.green.close}`);
// Color conversion between 256/truecolor
@ -25,8 +25,9 @@ console.log(`${style.green.open}Hello world!${style.green.close}`);
// may be degraded to fit the new color palette. This means terminals
// that do not support 16 million colors will best-match the
// original color.
console.log(`${style.color.ansi256(style.rgbToAnsi256(199, 20, 250))}Hello World${style.color.close}`)
console.log(`${style.color.ansi16m(...style.hexToRgb('#abcdef'))}Hello World${style.color.close}`)
console.log(`${styles.color.ansi(styles.rgbToAnsi(199, 20, 250))}Hello World${styles.color.close}`)
console.log(`${styles.color.ansi256(styles.rgbToAnsi256(199, 20, 250))}Hello World${styles.color.close}`)
console.log(`${styles.color.ansi16m(...styles.hexToRgb('#abcdef'))}Hello World${styles.color.close}`)
```
## API
@ -89,43 +90,53 @@ Each style has an `open` and `close` property.
By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
- `style.modifier`
- `style.color`
- `style.bgColor`
- `styles.modifier`
- `styles.color`
- `styles.bgColor`
###### Example
```js
console.log(style.color.green.open);
import styles from 'ansi-styles';
console.log(styles.color.green.open);
```
Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values.
Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `styles.codes`, which returns a `Map` with the open codes as keys and close codes as values.
###### Example
```js
console.log(style.codes.get(36));
import styles from 'ansi-styles';
console.log(styles.codes.get(36));
//=> 39
```
## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728)
## 16 / 256 / 16 million (TrueColor) support
`ansi-styles` allows converting between various color formats and ANSI escapes, with support for 256 and 16 million colors.
`ansi-styles` allows converting between various color formats and ANSI escapes, with support for 16, 256 and [16 million colors](https://gist.github.com/XVilka/8346728).
The following color spaces from `color-convert` are supported:
The following color spaces are supported:
- `rgb`
- `hex`
- `ansi256`
- `ansi`
To use these, call the associated conversion function with the intended output, for example:
```js
style.color.ansi256(style.rgbToAnsi256(100, 200, 15)); // RGB to 256 color ansi foreground code
style.bgColor.ansi256(style.hexToAnsi256('#C0FFEE')); // HEX to 256 color ansi foreground code
import styles from 'ansi-styles';
style.color.ansi16m(100, 200, 15); // RGB to 16 million color foreground code
style.bgColor.ansi16m(...style.hexToRgb('#C0FFEE')); // Hex (RGB) to 16 million color foreground code
styles.color.ansi(styles.rgbToAnsi(100, 200, 15)); // RGB to 16 color ansi foreground code
styles.bgColor.ansi(styles.hexToAnsi('#C0FFEE')); // HEX to 16 color ansi foreground code
styles.color.ansi256(styles.rgbToAnsi256(100, 200, 15)); // RGB to 256 color ansi foreground code
styles.bgColor.ansi256(styles.hexToAnsi256('#C0FFEE')); // HEX to 256 color ansi foreground code
styles.color.ansi16m(100, 200, 15); // RGB to 16 million color foreground code
styles.bgColor.ansi16m(...styles.hexToRgb('#C0FFEE')); // Hex (RGB) to 16 million color foreground code
```
## Related

25
node_modules/ava/node_modules/array-union/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,25 @@
/**
Create an array of unique values, in order, from the input arrays.
@example
```
import arrayUnion = require('array-union');
arrayUnion([1, 1, 2, 3], [2, 3]);
//=> [1, 2, 3]
arrayUnion(['foo', 'foo', 'bar']);
//=> ['foo', 'bar']
arrayUnion(['🐱', '🦄', '🐻'], ['🦄', '🌈']);
//=> ['🐱', '🦄', '🐻', '🌈']
arrayUnion(['🐱', '🦄'], ['🐻', '🦄'], ['🐶', '🌈', '🌈']);
//=> ['🐱', '🦄', '🐻', '🐶', '🌈']
```
*/
declare function arrayUnion<ArgumentsType extends readonly unknown[]>(
...arguments: readonly ArgumentsType[]
): ArgumentsType;
export = arrayUnion;

3
node_modules/ava/node_modules/array-union/index.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
const arrayUnion = (...arguments_) => [...new Set(arguments_.flat())];
export default arrayUnion;

View file

@ -1,6 +1,6 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

46
node_modules/ava/node_modules/array-union/package.json generated vendored Normal file
View file

@ -0,0 +1,46 @@
{
"name": "array-union",
"version": "3.0.1",
"description": "Create an array of unique values, in order, from the input arrays",
"license": "MIT",
"repository": "sindresorhus/array-union",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"array",
"set",
"uniq",
"unique",
"duplicate",
"remove",
"union",
"combine",
"merge"
],
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.14.0",
"xo": "^0.38.2"
},
"tsd": {
"compilerOptions": {
"esModuleInterop": true
}
}
}

39
node_modules/ava/node_modules/array-union/readme.md generated vendored Normal file
View file

@ -0,0 +1,39 @@
# array-union
> Create an array of unique values, in order, from the input arrays
## Install
```
$ npm install array-union
```
## Usage
```js
import arrayUnion from 'array-union';
arrayUnion([1, 1, 2, 3], [2, 3]);
//=> [1, 2, 3]
arrayUnion(['foo', 'foo', 'bar']);
//=> ['foo', 'bar']
arrayUnion(['🐱', '🦄', '🐻'], ['🦄', '🌈']);
//=> ['🐱', '🦄', '🐻', '🌈']
arrayUnion(['🐱', '🦄'], ['🐻', '🦄'], ['🐶', '🌈', '🌈']);
//=> ['🐱', '🦄', '🐻', '🐶', '🌈']
```
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-array-union?utm_source=npm-array-union&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

36
node_modules/ava/node_modules/arrify/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,36 @@
/**
Convert a value to an array.
_Specifying `null` or `undefined` results in an empty array._
@example
```
import arrify from 'arrify';
arrify('🦄');
//=> ['🦄']
arrify(['🦄']);
//=> ['🦄']
arrify(new Set(['🦄']));
//=> ['🦄']
arrify(null);
//=> []
arrify(undefined);
//=> []
```
*/
export default function arrify<ValueType>(
value: ValueType
): ValueType extends (null | undefined)
? [] // eslint-disable-line @typescript-eslint/ban-types
: ValueType extends string
? [string]
: ValueType extends readonly unknown[]
? ValueType
: ValueType extends Iterable<infer T>
? T[]
: [ValueType];

19
node_modules/ava/node_modules/arrify/index.js generated vendored Normal file
View file

@ -0,0 +1,19 @@
export default function arrify(value) {
if (value === null || value === undefined) {
return [];
}
if (Array.isArray(value)) {
return value;
}
if (typeof value === 'string') {
return [value];
}
if (typeof value[Symbol.iterator] === 'function') {
return [...value];
}
return [value];
}

View file

@ -1,8 +1,9 @@
The MIT License (MIT)
Copyright (c) 2015 Dmitry Ivanov
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

38
node_modules/ava/node_modules/arrify/package.json generated vendored Normal file
View file

@ -0,0 +1,38 @@
{
"name": "arrify",
"version": "3.0.0",
"description": "Convert a value to an array",
"license": "MIT",
"repository": "sindresorhus/arrify",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"array",
"arrify",
"arrayify",
"convert",
"value",
"ensure"
],
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.14.0",
"xo": "^0.39.1"
}
}

44
node_modules/ava/node_modules/arrify/readme.md generated vendored Normal file
View file

@ -0,0 +1,44 @@
# arrify
> Convert a value to an array
## Install
```
$ npm install arrify
```
## Usage
```js
import arrify from 'arrify';
arrify('🦄');
//=> ['🦄']
arrify(['🦄']);
//=> ['🦄']
arrify(new Set(['🦄']));
//=> ['🦄']
arrify(null);
//=> []
arrify(undefined);
//=> []
```
*Specifying `null` or `undefined` results in an empty array.*
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-arrify?utm_source=npm-arrify&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

87
node_modules/ava/node_modules/callsites/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,87 @@
type AnyFunction = (...args: any[]) => any;
export interface CallSite {
/**
Returns the value of `this`.
*/
getThis(): unknown | undefined;
/**
Returns the type of `this` as a string. This is the name of the function stored in the constructor field of `this`, if available, otherwise the object's `[[Class]]` internal property.
*/
getTypeName(): string | null;
/**
Returns the current function.
*/
getFunction(): AnyFunction | undefined;
/**
Returns the name of the current function, typically its `name` property. If a name property is not available an attempt will be made to try to infer a name from the function's context.
*/
getFunctionName(): string | null;
/**
Returns the name of the property of `this` or one of its prototypes that holds the current function.
*/
getMethodName(): string | undefined;
/**
Returns the name of the script if this function was defined in a script.
*/
getFileName(): string | null;
/**
Returns the current line number if this function was defined in a script.
*/
getLineNumber(): number | null;
/**
Returns the current column number if this function was defined in a script.
*/
getColumnNumber(): number | null;
/**
Returns a string representing the location where `eval` was called if this function was created using a call to `eval`.
*/
getEvalOrigin(): string | undefined;
/**
Returns `true` if this is a top-level invocation, that is, if it's a global object.
*/
isToplevel(): boolean;
/**
Returns `true` if this call takes place in code defined by a call to `eval`.
*/
isEval(): boolean;
/**
Returns `true` if this call is in native V8 code.
*/
isNative(): boolean;
/**
Returns `true` if this is a constructor call.
*/
isConstructor(): boolean;
}
/**
Get callsites from the V8 stack trace API.
@returns An array of `CallSite` objects.
@example
```
import callsites from 'callsites';
function unicorn() {
console.log(callsites()[0].getFileName());
//=> '/Users/sindresorhus/dev/callsites/test.js'
}
unicorn();
```
*/
export default function callsites(): CallSite[];

7
node_modules/ava/node_modules/callsites/index.js generated vendored Normal file
View file

@ -0,0 +1,7 @@
export default function callsites() {
const _prepareStackTrace = Error.prepareStackTrace;
Error.prepareStackTrace = (_, stack) => stack;
const stack = new Error().stack.slice(1); // eslint-disable-line unicorn/error-message
Error.prepareStackTrace = _prepareStackTrace;
return stack;
}

9
node_modules/ava/node_modules/callsites/license generated vendored Normal file
View file

@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

42
node_modules/ava/node_modules/callsites/package.json generated vendored Normal file
View file

@ -0,0 +1,42 @@
{
"name": "callsites",
"version": "4.0.0",
"description": "Get callsites from the V8 stack trace API",
"license": "MIT",
"repository": "sindresorhus/callsites",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12.20"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"stacktrace",
"v8",
"callsite",
"callsites",
"stack",
"trace",
"function",
"file",
"line",
"debug"
],
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.17.0",
"xo": "^0.44.0"
}
}

52
node_modules/ava/node_modules/callsites/readme.md generated vendored Normal file
View file

@ -0,0 +1,52 @@
# callsites
> Get callsites from the [V8 stack trace API](https://v8.dev/docs/stack-trace-api)
## Install
```
$ npm install callsites
```
## Usage
```js
import callsites from 'callsites';
function unicorn() {
console.log(callsites()[0].getFileName());
//=> '/Users/sindresorhus/dev/callsites/test.js'
}
unicorn();
```
## API
Returns an array of callsite objects with the following methods:
- `getThis`: Returns the value of `this`.
- `getTypeName`: Returns the type of `this` as a string. This is the name of the function stored in the constructor field of `this`, if available, otherwise the object's `[[Class]]` internal property.
- `getFunction`: Returns the current function.
- `getFunctionName`: Returns the name of the current function, typically its `name` property. If a name property is not available an attempt will be made to try to infer a name from the function's context.
- `getMethodName`: Returns the name of the property of `this` or one of its prototypes that holds the current function.
- `getFileName`: If this function was defined in a script returns the name of the script.
- `getLineNumber`: If this function was defined in a script returns the current line number.
- `getColumnNumber`: If this function was defined in a script returns the current column number
- `getEvalOrigin`: If this function was created using a call to `eval` returns a string representing the location where `eval` was called.
- `isToplevel`: Is this a top-level invocation, that is, is this the global object?
- `isEval`: Does this call take place in code defined by a call to `eval`?
- `isNative`: Is this call in native V8 code?
- `isConstructor`: Is this a constructor call?
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-callsites?utm_source=npm-callsites&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>

View file

@ -1,415 +0,0 @@
/**
Basic foreground colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type ForegroundColor =
| 'black'
| 'red'
| 'green'
| 'yellow'
| 'blue'
| 'magenta'
| 'cyan'
| 'white'
| 'gray'
| 'grey'
| 'blackBright'
| 'redBright'
| 'greenBright'
| 'yellowBright'
| 'blueBright'
| 'magentaBright'
| 'cyanBright'
| 'whiteBright';
/**
Basic background colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type BackgroundColor =
| 'bgBlack'
| 'bgRed'
| 'bgGreen'
| 'bgYellow'
| 'bgBlue'
| 'bgMagenta'
| 'bgCyan'
| 'bgWhite'
| 'bgGray'
| 'bgGrey'
| 'bgBlackBright'
| 'bgRedBright'
| 'bgGreenBright'
| 'bgYellowBright'
| 'bgBlueBright'
| 'bgMagentaBright'
| 'bgCyanBright'
| 'bgWhiteBright';
/**
Basic colors.
[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support)
*/
declare type Color = ForegroundColor | BackgroundColor;
declare type Modifiers =
| 'reset'
| 'bold'
| 'dim'
| 'italic'
| 'underline'
| 'inverse'
| 'hidden'
| 'strikethrough'
| 'visible';
declare namespace chalk {
/**
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
type Level = 0 | 1 | 2 | 3;
interface Options {
/**
Specify the color support for Chalk.
By default, color support is automatically detected based on the environment.
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
level?: Level;
}
/**
Return a new Chalk instance.
*/
type Instance = new (options?: Options) => Chalk;
/**
Detect whether the terminal supports color.
*/
interface ColorSupport {
/**
The color level used by Chalk.
*/
level: Level;
/**
Return whether Chalk supports basic 16 colors.
*/
hasBasic: boolean;
/**
Return whether Chalk supports ANSI 256 colors.
*/
has256: boolean;
/**
Return whether Chalk supports Truecolor 16 million colors.
*/
has16m: boolean;
}
interface ChalkFunction {
/**
Use a template string.
@remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341))
@example
```
import chalk = require('chalk');
log(chalk`
CPU: {red ${cpu.totalPercent}%}
RAM: {green ${ram.used / ram.total * 100}%}
DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%}
`);
```
@example
```
import chalk = require('chalk');
log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`)
```
*/
(text: TemplateStringsArray, ...placeholders: unknown[]): string;
(...text: unknown[]): string;
}
interface Chalk extends ChalkFunction {
/**
Return a new Chalk instance.
*/
Instance: Instance;
/**
The color support for Chalk.
By default, color support is automatically detected based on the environment.
Levels:
- `0` - All colors disabled.
- `1` - Basic 16 colors support.
- `2` - ANSI 256 colors support.
- `3` - Truecolor 16 million colors support.
*/
level: Level;
/**
Use HEX value to set text color.
@param color - Hexadecimal value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.hex('#DEADED');
```
*/
hex(color: string): Chalk;
/**
Use keyword color value to set text color.
@param color - Keyword value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.keyword('orange');
```
*/
keyword(color: string): Chalk;
/**
Use RGB values to set text color.
*/
rgb(red: number, green: number, blue: number): Chalk;
/**
Use HSL values to set text color.
*/
hsl(hue: number, saturation: number, lightness: number): Chalk;
/**
Use HSV values to set text color.
*/
hsv(hue: number, saturation: number, value: number): Chalk;
/**
Use HWB values to set text color.
*/
hwb(hue: number, whiteness: number, blackness: number): Chalk;
/**
Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color.
30 <= code && code < 38 || 90 <= code && code < 98
For example, 31 for red, 91 for redBright.
*/
ansi(code: number): Chalk;
/**
Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
*/
ansi256(index: number): Chalk;
/**
Use HEX value to set background color.
@param color - Hexadecimal value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.bgHex('#DEADED');
```
*/
bgHex(color: string): Chalk;
/**
Use keyword color value to set background color.
@param color - Keyword value representing the desired color.
@example
```
import chalk = require('chalk');
chalk.bgKeyword('orange');
```
*/
bgKeyword(color: string): Chalk;
/**
Use RGB values to set background color.
*/
bgRgb(red: number, green: number, blue: number): Chalk;
/**
Use HSL values to set background color.
*/
bgHsl(hue: number, saturation: number, lightness: number): Chalk;
/**
Use HSV values to set background color.
*/
bgHsv(hue: number, saturation: number, value: number): Chalk;
/**
Use HWB values to set background color.
*/
bgHwb(hue: number, whiteness: number, blackness: number): Chalk;
/**
Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color.
30 <= code && code < 38 || 90 <= code && code < 98
For example, 31 for red, 91 for redBright.
Use the foreground code, not the background code (for example, not 41, nor 101).
*/
bgAnsi(code: number): Chalk;
/**
Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color.
*/
bgAnsi256(index: number): Chalk;
/**
Modifier: Resets the current color chain.
*/
readonly reset: Chalk;
/**
Modifier: Make text bold.
*/
readonly bold: Chalk;
/**
Modifier: Emitting only a small amount of light.
*/
readonly dim: Chalk;
/**
Modifier: Make text italic. (Not widely supported)
*/
readonly italic: Chalk;
/**
Modifier: Make text underline. (Not widely supported)
*/
readonly underline: Chalk;
/**
Modifier: Inverse background and foreground colors.
*/
readonly inverse: Chalk;
/**
Modifier: Prints the text, but makes it invisible.
*/
readonly hidden: Chalk;
/**
Modifier: Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: Chalk;
/**
Modifier: Prints the text only when Chalk has a color support level > 0.
Can be useful for things that are purely cosmetic.
*/
readonly visible: Chalk;
readonly black: Chalk;
readonly red: Chalk;
readonly green: Chalk;
readonly yellow: Chalk;
readonly blue: Chalk;
readonly magenta: Chalk;
readonly cyan: Chalk;
readonly white: Chalk;
/*
Alias for `blackBright`.
*/
readonly gray: Chalk;
/*
Alias for `blackBright`.
*/
readonly grey: Chalk;
readonly blackBright: Chalk;
readonly redBright: Chalk;
readonly greenBright: Chalk;
readonly yellowBright: Chalk;
readonly blueBright: Chalk;
readonly magentaBright: Chalk;
readonly cyanBright: Chalk;
readonly whiteBright: Chalk;
readonly bgBlack: Chalk;
readonly bgRed: Chalk;
readonly bgGreen: Chalk;
readonly bgYellow: Chalk;
readonly bgBlue: Chalk;
readonly bgMagenta: Chalk;
readonly bgCyan: Chalk;
readonly bgWhite: Chalk;
/*
Alias for `bgBlackBright`.
*/
readonly bgGray: Chalk;
/*
Alias for `bgBlackBright`.
*/
readonly bgGrey: Chalk;
readonly bgBlackBright: Chalk;
readonly bgRedBright: Chalk;
readonly bgGreenBright: Chalk;
readonly bgYellowBright: Chalk;
readonly bgBlueBright: Chalk;
readonly bgMagentaBright: Chalk;
readonly bgCyanBright: Chalk;
readonly bgWhiteBright: Chalk;
}
}
/**
Main Chalk object that allows to chain styles together.
Call the last one as a method with a string argument.
Order doesn't matter, and later styles take precedent in case of a conflict.
This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`.
*/
declare const chalk: chalk.Chalk & chalk.ChalkFunction & {
supportsColor: chalk.ColorSupport | false;
Level: chalk.Level;
Color: Color;
ForegroundColor: ForegroundColor;
BackgroundColor: BackgroundColor;
Modifiers: Modifiers;
stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false};
};
export = chalk;

Some files were not shown because too many files have changed in this diff Show more