Fix dependabot issues
This commit is contained in:
parent
c89d9bd8b0
commit
531c6ba7c8
705 changed files with 53406 additions and 20466 deletions
2
.github/workflows/update-dependencies.yml
vendored
2
.github/workflows/update-dependencies.yml
vendored
|
|
@ -11,6 +11,8 @@ jobs:
|
|||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Remove PR label
|
||||
env:
|
||||
|
|
|
|||
1972
node_modules/.package-lock.json
generated
vendored
1972
node_modules/.package-lock.json
generated
vendored
File diff suppressed because it is too large
Load diff
125
node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md
generated
vendored
125
node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md
generated
vendored
|
|
@ -1,125 +0,0 @@
|
|||
# is-plain-object [](https://www.npmjs.com/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://travis-ci.org/jonschlinkert/is-plain-object)
|
||||
|
||||
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save is-plain-object
|
||||
```
|
||||
|
||||
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
|
||||
|
||||
## Usage
|
||||
|
||||
with es modules
|
||||
```js
|
||||
import { isPlainObject } from 'is-plain-object';
|
||||
```
|
||||
|
||||
or with commonjs
|
||||
```js
|
||||
const { isPlainObject } = require('is-plain-object');
|
||||
```
|
||||
|
||||
**true** when created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
```js
|
||||
isPlainObject(Object.create({}));
|
||||
//=> true
|
||||
isPlainObject(Object.create(Object.prototype));
|
||||
//=> true
|
||||
isPlainObject({foo: 'bar'});
|
||||
//=> true
|
||||
isPlainObject({});
|
||||
//=> true
|
||||
isPlainObject(null);
|
||||
//=> true
|
||||
```
|
||||
|
||||
**false** when not created by the `Object` constructor.
|
||||
|
||||
```js
|
||||
isPlainObject(1);
|
||||
//=> false
|
||||
isPlainObject(['foo', 'bar']);
|
||||
//=> false
|
||||
isPlainObject([]);
|
||||
//=> false
|
||||
isPlainObject(new Foo);
|
||||
//=> false
|
||||
isPlainObject(Object.create(null));
|
||||
//=> false
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Related projects
|
||||
|
||||
You might also be interested in these projects:
|
||||
|
||||
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
|
||||
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
|
||||
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| --- | --- |
|
||||
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 6 | [TrySound](https://github.com/TrySound) |
|
||||
| 6 | [stevenvachon](https://github.com/stevenvachon) |
|
||||
| 3 | [onokumus](https://github.com/onokumus) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
***
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._
|
||||
85
node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json
generated
vendored
85
node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json
generated
vendored
|
|
@ -1,85 +0,0 @@
|
|||
{
|
||||
"name": "is-plain-object",
|
||||
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
|
||||
"version": "5.0.0",
|
||||
"homepage": "https://github.com/jonschlinkert/is-plain-object",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
|
||||
"Osman Nuri Okumuş (http://onokumus.com)",
|
||||
"Steven Vachon (https://svachon.com)",
|
||||
"(https://github.com/wtgtybhertgeghgtwtg)",
|
||||
"Bogdan Chadkin (https://github.com/TrySound)"
|
||||
],
|
||||
"repository": "jonschlinkert/is-plain-object",
|
||||
"bugs": {
|
||||
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/is-plain-object.js",
|
||||
"module": "dist/is-plain-object.mjs",
|
||||
"types": "is-plain-object.d.ts",
|
||||
"files": [
|
||||
"is-plain-object.d.ts",
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/is-plain-object.mjs",
|
||||
"require": "./dist/is-plain-object.js"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
|
||||
"test_node": "mocha -r esm",
|
||||
"test": "npm run test_node && npm run build && npm run test_browser",
|
||||
"prepare": "rollup -c"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"esm": "^3.2.22",
|
||||
"gulp-format-md": "^1.0.0",
|
||||
"mocha": "^6.1.4",
|
||||
"mocha-headless-chrome": "^3.1.0",
|
||||
"rollup": "^2.22.1"
|
||||
},
|
||||
"keywords": [
|
||||
"check",
|
||||
"is",
|
||||
"is-object",
|
||||
"isobject",
|
||||
"javascript",
|
||||
"kind",
|
||||
"kind-of",
|
||||
"object",
|
||||
"plain",
|
||||
"type",
|
||||
"typeof",
|
||||
"value"
|
||||
],
|
||||
"verb": {
|
||||
"toc": false,
|
||||
"layout": "default",
|
||||
"tasks": [
|
||||
"readme"
|
||||
],
|
||||
"plugins": [
|
||||
"gulp-format-md"
|
||||
],
|
||||
"related": {
|
||||
"list": [
|
||||
"is-number",
|
||||
"isobject",
|
||||
"kind-of"
|
||||
]
|
||||
},
|
||||
"lint": {
|
||||
"reflinks": true
|
||||
}
|
||||
}
|
||||
}
|
||||
125
node_modules/@octokit/request/node_modules/is-plain-object/README.md
generated
vendored
125
node_modules/@octokit/request/node_modules/is-plain-object/README.md
generated
vendored
|
|
@ -1,125 +0,0 @@
|
|||
# is-plain-object [](https://www.npmjs.com/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://travis-ci.org/jonschlinkert/is-plain-object)
|
||||
|
||||
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save is-plain-object
|
||||
```
|
||||
|
||||
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
|
||||
|
||||
## Usage
|
||||
|
||||
with es modules
|
||||
```js
|
||||
import { isPlainObject } from 'is-plain-object';
|
||||
```
|
||||
|
||||
or with commonjs
|
||||
```js
|
||||
const { isPlainObject } = require('is-plain-object');
|
||||
```
|
||||
|
||||
**true** when created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
```js
|
||||
isPlainObject(Object.create({}));
|
||||
//=> true
|
||||
isPlainObject(Object.create(Object.prototype));
|
||||
//=> true
|
||||
isPlainObject({foo: 'bar'});
|
||||
//=> true
|
||||
isPlainObject({});
|
||||
//=> true
|
||||
isPlainObject(null);
|
||||
//=> true
|
||||
```
|
||||
|
||||
**false** when not created by the `Object` constructor.
|
||||
|
||||
```js
|
||||
isPlainObject(1);
|
||||
//=> false
|
||||
isPlainObject(['foo', 'bar']);
|
||||
//=> false
|
||||
isPlainObject([]);
|
||||
//=> false
|
||||
isPlainObject(new Foo);
|
||||
//=> false
|
||||
isPlainObject(Object.create(null));
|
||||
//=> false
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Related projects
|
||||
|
||||
You might also be interested in these projects:
|
||||
|
||||
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
|
||||
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
|
||||
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| --- | --- |
|
||||
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 6 | [TrySound](https://github.com/TrySound) |
|
||||
| 6 | [stevenvachon](https://github.com/stevenvachon) |
|
||||
| 3 | [onokumus](https://github.com/onokumus) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
***
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._
|
||||
38
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js
generated
vendored
38
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js
generated
vendored
|
|
@ -1,38 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
/*!
|
||||
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
||||
*
|
||||
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
* Released under the MIT License.
|
||||
*/
|
||||
|
||||
function isObject(o) {
|
||||
return Object.prototype.toString.call(o) === '[object Object]';
|
||||
}
|
||||
|
||||
function isPlainObject(o) {
|
||||
var ctor,prot;
|
||||
|
||||
if (isObject(o) === false) return false;
|
||||
|
||||
// If has modified constructor
|
||||
ctor = o.constructor;
|
||||
if (ctor === undefined) return true;
|
||||
|
||||
// If has modified prototype
|
||||
prot = ctor.prototype;
|
||||
if (isObject(prot) === false) return false;
|
||||
|
||||
// If constructor does not have an Object-specific method
|
||||
if (prot.hasOwnProperty('isPrototypeOf') === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Most likely a plain Object
|
||||
return true;
|
||||
}
|
||||
|
||||
exports.isPlainObject = isPlainObject;
|
||||
34
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs
generated
vendored
34
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs
generated
vendored
|
|
@ -1,34 +0,0 @@
|
|||
/*!
|
||||
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
||||
*
|
||||
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
* Released under the MIT License.
|
||||
*/
|
||||
|
||||
function isObject(o) {
|
||||
return Object.prototype.toString.call(o) === '[object Object]';
|
||||
}
|
||||
|
||||
function isPlainObject(o) {
|
||||
var ctor,prot;
|
||||
|
||||
if (isObject(o) === false) return false;
|
||||
|
||||
// If has modified constructor
|
||||
ctor = o.constructor;
|
||||
if (ctor === undefined) return true;
|
||||
|
||||
// If has modified prototype
|
||||
prot = ctor.prototype;
|
||||
if (isObject(prot) === false) return false;
|
||||
|
||||
// If constructor does not have an Object-specific method
|
||||
if (prot.hasOwnProperty('isPrototypeOf') === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Most likely a plain Object
|
||||
return true;
|
||||
}
|
||||
|
||||
export { isPlainObject };
|
||||
1
node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts
generated
vendored
1
node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts
generated
vendored
|
|
@ -1 +0,0 @@
|
|||
export function isPlainObject(o: any): boolean;
|
||||
85
node_modules/@octokit/request/node_modules/is-plain-object/package.json
generated
vendored
85
node_modules/@octokit/request/node_modules/is-plain-object/package.json
generated
vendored
|
|
@ -1,85 +0,0 @@
|
|||
{
|
||||
"name": "is-plain-object",
|
||||
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
|
||||
"version": "5.0.0",
|
||||
"homepage": "https://github.com/jonschlinkert/is-plain-object",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
|
||||
"Osman Nuri Okumuş (http://onokumus.com)",
|
||||
"Steven Vachon (https://svachon.com)",
|
||||
"(https://github.com/wtgtybhertgeghgtwtg)",
|
||||
"Bogdan Chadkin (https://github.com/TrySound)"
|
||||
],
|
||||
"repository": "jonschlinkert/is-plain-object",
|
||||
"bugs": {
|
||||
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/is-plain-object.js",
|
||||
"module": "dist/is-plain-object.mjs",
|
||||
"types": "is-plain-object.d.ts",
|
||||
"files": [
|
||||
"is-plain-object.d.ts",
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/is-plain-object.mjs",
|
||||
"require": "./dist/is-plain-object.js"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
|
||||
"test_node": "mocha -r esm",
|
||||
"test": "npm run test_node && npm run build && npm run test_browser",
|
||||
"prepare": "rollup -c"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"esm": "^3.2.22",
|
||||
"gulp-format-md": "^1.0.0",
|
||||
"mocha": "^6.1.4",
|
||||
"mocha-headless-chrome": "^3.1.0",
|
||||
"rollup": "^2.22.1"
|
||||
},
|
||||
"keywords": [
|
||||
"check",
|
||||
"is",
|
||||
"is-object",
|
||||
"isobject",
|
||||
"javascript",
|
||||
"kind",
|
||||
"kind-of",
|
||||
"object",
|
||||
"plain",
|
||||
"type",
|
||||
"typeof",
|
||||
"value"
|
||||
],
|
||||
"verb": {
|
||||
"toc": false,
|
||||
"layout": "default",
|
||||
"tasks": [
|
||||
"readme"
|
||||
],
|
||||
"plugins": [
|
||||
"gulp-format-md"
|
||||
],
|
||||
"related": {
|
||||
"list": [
|
||||
"is-number",
|
||||
"isobject",
|
||||
"kind-of"
|
||||
]
|
||||
},
|
||||
"lint": {
|
||||
"reflinks": true
|
||||
}
|
||||
}
|
||||
}
|
||||
21
node_modules/@types/events/LICENSE
generated
vendored
21
node_modules/@types/events/LICENSE
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/events/README.md
generated
vendored
16
node_modules/@types/events/README.md
generated
vendored
|
|
@ -1,16 +0,0 @@
|
|||
# Installation
|
||||
> `npm install --save @types/events`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for events (https://github.com/Gozala/events).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/events
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 24 Jan 2019 03:19:08 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by Yasunori Ohoka <https://github.com/yasupeke>, Shenwei Wang <https://github.com/weareoutman>.
|
||||
28
node_modules/@types/events/index.d.ts
generated
vendored
28
node_modules/@types/events/index.d.ts
generated
vendored
|
|
@ -1,28 +0,0 @@
|
|||
// Type definitions for events 3.0
|
||||
// Project: https://github.com/Gozala/events
|
||||
// Definitions by: Yasunori Ohoka <https://github.com/yasupeke>
|
||||
// Shenwei Wang <https://github.com/weareoutman>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export type Listener = (...args: any[]) => void;
|
||||
|
||||
export class EventEmitter {
|
||||
static listenerCount(emitter: EventEmitter, type: string | number): number;
|
||||
static defaultMaxListeners: number;
|
||||
|
||||
eventNames(): Array<string | number>;
|
||||
setMaxListeners(n: number): this;
|
||||
getMaxListeners(): number;
|
||||
emit(type: string | number, ...args: any[]): boolean;
|
||||
addListener(type: string | number, listener: Listener): this;
|
||||
on(type: string | number, listener: Listener): this;
|
||||
once(type: string | number, listener: Listener): this;
|
||||
prependListener(type: string | number, listener: Listener): this;
|
||||
prependOnceListener(type: string | number, listener: Listener): this;
|
||||
removeListener(type: string | number, listener: Listener): this;
|
||||
off(type: string | number, listener: Listener): this;
|
||||
removeAllListeners(type?: string | number): this;
|
||||
listeners(type: string | number): Listener[];
|
||||
listenerCount(type: string | number): number;
|
||||
rawListeners(type: string | number): Listener[];
|
||||
}
|
||||
28
node_modules/@types/events/package.json
generated
vendored
28
node_modules/@types/events/package.json
generated
vendored
|
|
@ -1,28 +0,0 @@
|
|||
{
|
||||
"name": "@types/events",
|
||||
"version": "3.0.0",
|
||||
"description": "TypeScript definitions for events",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Yasunori Ohoka",
|
||||
"url": "https://github.com/yasupeke",
|
||||
"githubUsername": "yasupeke"
|
||||
},
|
||||
{
|
||||
"name": "Shenwei Wang",
|
||||
"url": "https://github.com/weareoutman",
|
||||
"githubUsername": "weareoutman"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "ae078136220837864b64cc7c1c5267ca1ceb809166fb74569e637bc7de9f2e12",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
21
node_modules/@types/glob/LICENSE
generated
vendored
21
node_modules/@types/glob/LICENSE
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/glob/README.md
generated
vendored
16
node_modules/@types/glob/README.md
generated
vendored
|
|
@ -1,16 +0,0 @@
|
|||
# Installation
|
||||
> `npm install --save @types/glob`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for Glob (https://github.com/isaacs/node-glob).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/glob
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 27 Sep 2018 12:34:19 GMT
|
||||
* Dependencies: events, minimatch, node
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by vvakame <https://github.com/vvakame>, voy <https://github.com/voy>, Klaus Meinhardt <https://github.com/ajafff>.
|
||||
87
node_modules/@types/glob/index.d.ts
generated
vendored
87
node_modules/@types/glob/index.d.ts
generated
vendored
|
|
@ -1,87 +0,0 @@
|
|||
// Type definitions for Glob 7.1
|
||||
// Project: https://github.com/isaacs/node-glob
|
||||
// Definitions by: vvakame <https://github.com/vvakame>
|
||||
// voy <https://github.com/voy>
|
||||
// Klaus Meinhardt <https://github.com/ajafff>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
/// <reference types="node" />
|
||||
|
||||
import events = require("events");
|
||||
import minimatch = require("minimatch");
|
||||
|
||||
declare function G(pattern: string, cb: (err: Error | null, matches: string[]) => void): void;
|
||||
declare function G(pattern: string, options: G.IOptions, cb: (err: Error | null, matches: string[]) => void): void;
|
||||
|
||||
declare namespace G {
|
||||
function __promisify__(pattern: string, options?: IOptions): Promise<string[]>;
|
||||
|
||||
function sync(pattern: string, options?: IOptions): string[];
|
||||
|
||||
function hasMagic(pattern: string, options?: IOptions): boolean;
|
||||
|
||||
let Glob: IGlobStatic;
|
||||
let GlobSync: IGlobSyncStatic;
|
||||
|
||||
interface IOptions extends minimatch.IOptions {
|
||||
cwd?: string;
|
||||
root?: string;
|
||||
dot?: boolean;
|
||||
nomount?: boolean;
|
||||
mark?: boolean;
|
||||
nosort?: boolean;
|
||||
stat?: boolean;
|
||||
silent?: boolean;
|
||||
strict?: boolean;
|
||||
cache?: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
|
||||
statCache?: { [path: string]: false | { isDirectory(): boolean} | undefined };
|
||||
symlinks?: { [path: string]: boolean | undefined };
|
||||
realpathCache?: { [path: string]: string };
|
||||
sync?: boolean;
|
||||
nounique?: boolean;
|
||||
nonull?: boolean;
|
||||
debug?: boolean;
|
||||
nobrace?: boolean;
|
||||
noglobstar?: boolean;
|
||||
noext?: boolean;
|
||||
nocase?: boolean;
|
||||
matchBase?: any;
|
||||
nodir?: boolean;
|
||||
ignore?: string | ReadonlyArray<string>;
|
||||
follow?: boolean;
|
||||
realpath?: boolean;
|
||||
nonegate?: boolean;
|
||||
nocomment?: boolean;
|
||||
absolute?: boolean;
|
||||
}
|
||||
|
||||
interface IGlobStatic extends events.EventEmitter {
|
||||
new (pattern: string, cb?: (err: Error | null, matches: string[]) => void): IGlob;
|
||||
new (pattern: string, options: IOptions, cb?: (err: Error | null, matches: string[]) => void): IGlob;
|
||||
prototype: IGlob;
|
||||
}
|
||||
|
||||
interface IGlobSyncStatic {
|
||||
new (pattern: string, options?: IOptions): IGlobBase;
|
||||
prototype: IGlobBase;
|
||||
}
|
||||
|
||||
interface IGlobBase {
|
||||
minimatch: minimatch.IMinimatch;
|
||||
options: IOptions;
|
||||
aborted: boolean;
|
||||
cache: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
|
||||
statCache: { [path: string]: false | { isDirectory(): boolean; } | undefined };
|
||||
symlinks: { [path: string]: boolean | undefined };
|
||||
realpathCache: { [path: string]: string };
|
||||
found: string[];
|
||||
}
|
||||
|
||||
interface IGlob extends IGlobBase, events.EventEmitter {
|
||||
pause(): void;
|
||||
resume(): void;
|
||||
abort(): void;
|
||||
}
|
||||
}
|
||||
|
||||
export = G;
|
||||
36
node_modules/@types/glob/package.json
generated
vendored
36
node_modules/@types/glob/package.json
generated
vendored
|
|
@ -1,36 +0,0 @@
|
|||
{
|
||||
"name": "@types/glob",
|
||||
"version": "7.1.1",
|
||||
"description": "TypeScript definitions for Glob",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "vvakame",
|
||||
"url": "https://github.com/vvakame",
|
||||
"githubUsername": "vvakame"
|
||||
},
|
||||
{
|
||||
"name": "voy",
|
||||
"url": "https://github.com/voy",
|
||||
"githubUsername": "voy"
|
||||
},
|
||||
{
|
||||
"name": "Klaus Meinhardt",
|
||||
"url": "https://github.com/ajafff",
|
||||
"githubUsername": "ajafff"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {
|
||||
"@types/events": "*",
|
||||
"@types/minimatch": "*",
|
||||
"@types/node": "*"
|
||||
},
|
||||
"typesPublisherContentHash": "43019f2af91c7a4ca3453c4b806a01c521ca3008ffe1bfefd37c5f9d6135660e",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
21
node_modules/@types/minimatch/LICENSE
generated
vendored
21
node_modules/@types/minimatch/LICENSE
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/minimatch/README.md
generated
vendored
16
node_modules/@types/minimatch/README.md
generated
vendored
|
|
@ -1,16 +0,0 @@
|
|||
# Installation
|
||||
> `npm install --save @types/minimatch`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for Minimatch (https://github.com/isaacs/minimatch).
|
||||
|
||||
# Details
|
||||
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/minimatch
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 04 Jan 2018 23:26:01 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by vvakame <https://github.com/vvakame>, Shant Marouti <https://github.com/shantmarouti>.
|
||||
214
node_modules/@types/minimatch/index.d.ts
generated
vendored
214
node_modules/@types/minimatch/index.d.ts
generated
vendored
|
|
@ -1,214 +0,0 @@
|
|||
// Type definitions for Minimatch 3.0
|
||||
// Project: https://github.com/isaacs/minimatch
|
||||
// Definitions by: vvakame <https://github.com/vvakame>
|
||||
// Shant Marouti <https://github.com/shantmarouti>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
/**
|
||||
* Tests a path against the pattern using the options.
|
||||
*/
|
||||
declare function M(target: string, pattern: string, options?: M.IOptions): boolean;
|
||||
|
||||
declare namespace M {
|
||||
/**
|
||||
* Match against the list of files, in the style of fnmatch or glob.
|
||||
* If nothing is matched, and options.nonull is set,
|
||||
* then return a list containing the pattern itself.
|
||||
*/
|
||||
function match(list: ReadonlyArray<string>, pattern: string, options?: IOptions): string[];
|
||||
|
||||
/**
|
||||
* Returns a function that tests its supplied argument, suitable for use with Array.filter
|
||||
*/
|
||||
function filter(pattern: string, options?: IOptions): (element: string, indexed: number, array: ReadonlyArray<string>) => boolean;
|
||||
|
||||
/**
|
||||
* Make a regular expression object from the pattern.
|
||||
*/
|
||||
function makeRe(pattern: string, options?: IOptions): RegExp;
|
||||
|
||||
let Minimatch: IMinimatchStatic;
|
||||
|
||||
interface IOptions {
|
||||
/**
|
||||
* Dump a ton of stuff to stderr.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
debug?: boolean;
|
||||
|
||||
/**
|
||||
* Do not expand {a,b} and {1..3} brace sets.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nobrace?: boolean;
|
||||
|
||||
/**
|
||||
* Disable ** matching against multiple folder names.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noglobstar?: boolean;
|
||||
|
||||
/**
|
||||
* Allow patterns to match filenames starting with a period,
|
||||
* even if the pattern does not explicitly have a period in that spot.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
dot?: boolean;
|
||||
|
||||
/**
|
||||
* Disable "extglob" style patterns like +(a|b).
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noext?: boolean;
|
||||
|
||||
/**
|
||||
* Perform a case-insensitive match.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocase?: boolean;
|
||||
|
||||
/**
|
||||
* When a match is not found by minimatch.match,
|
||||
* return a list containing the pattern itself if this option is set.
|
||||
* Otherwise, an empty list is returned if there are no matches.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonull?: boolean;
|
||||
|
||||
/**
|
||||
* If set, then patterns without slashes will be matched against
|
||||
* the basename of the path if it contains slashes.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
matchBase?: boolean;
|
||||
|
||||
/**
|
||||
* Suppress the behavior of treating #
|
||||
* at the start of a pattern as a comment.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocomment?: boolean;
|
||||
|
||||
/**
|
||||
* Suppress the behavior of treating a leading ! character as negation.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonegate?: boolean;
|
||||
|
||||
/**
|
||||
* Returns from negate expressions the same as if they were not negated.
|
||||
* (Ie, true on a hit, false on a miss.)
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
flipNegate?: boolean;
|
||||
}
|
||||
|
||||
interface IMinimatchStatic {
|
||||
new(pattern: string, options?: IOptions): IMinimatch;
|
||||
prototype: IMinimatch;
|
||||
}
|
||||
|
||||
interface IMinimatch {
|
||||
/**
|
||||
* The original pattern the minimatch object represents.
|
||||
*/
|
||||
pattern: string;
|
||||
|
||||
/**
|
||||
* The options supplied to the constructor.
|
||||
*/
|
||||
options: IOptions;
|
||||
|
||||
/**
|
||||
* A 2-dimensional array of regexp or string expressions.
|
||||
*/
|
||||
set: any[][]; // (RegExp | string)[][]
|
||||
|
||||
/**
|
||||
* A single regular expression expressing the entire pattern.
|
||||
* Created by the makeRe method.
|
||||
*/
|
||||
regexp: RegExp;
|
||||
|
||||
/**
|
||||
* True if the pattern is negated.
|
||||
*/
|
||||
negate: boolean;
|
||||
|
||||
/**
|
||||
* True if the pattern is a comment.
|
||||
*/
|
||||
comment: boolean;
|
||||
|
||||
/**
|
||||
* True if the pattern is ""
|
||||
*/
|
||||
empty: boolean;
|
||||
|
||||
/**
|
||||
* Generate the regexp member if necessary, and return it.
|
||||
* Will return false if the pattern is invalid.
|
||||
*/
|
||||
makeRe(): RegExp; // regexp or boolean
|
||||
|
||||
/**
|
||||
* Return true if the filename matches the pattern, or false otherwise.
|
||||
*/
|
||||
match(fname: string): boolean;
|
||||
|
||||
/**
|
||||
* Take a /-split filename, and match it against a single row in the regExpSet.
|
||||
* This method is mainly for internal use, but is exposed so that it can be used
|
||||
* by a glob-walker that needs to avoid excessive filesystem calls.
|
||||
*/
|
||||
matchOne(files: string[], pattern: string[], partial: boolean): boolean;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
debug(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
make(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
parseNegate(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
braceExpand(pattern: string, options: IOptions): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
parse(pattern: string, isSub?: boolean): void;
|
||||
}
|
||||
}
|
||||
|
||||
export = M;
|
||||
27
node_modules/@types/minimatch/package.json
generated
vendored
27
node_modules/@types/minimatch/package.json
generated
vendored
|
|
@ -1,27 +0,0 @@
|
|||
{
|
||||
"name": "@types/minimatch",
|
||||
"version": "3.0.3",
|
||||
"description": "TypeScript definitions for Minimatch",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "vvakame",
|
||||
"url": "https://github.com/vvakame",
|
||||
"githubUsername": "vvakame"
|
||||
},
|
||||
{
|
||||
"name": "Shant Marouti",
|
||||
"url": "https://github.com/shantmarouti",
|
||||
"githubUsername": "shantmarouti"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "e768e36348874adcc93ac67e9c3c7b5fcbd39079c0610ec16e410b8f851308d1",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
|
|
@ -1,3 +1,49 @@
|
|||
## 8.2.0 (2021-09-06)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for walking ES2022 class static blocks.
|
||||
|
||||
## 8.1.1 (2021-06-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Include `base` in the type declarations.
|
||||
|
||||
## 8.1.0 (2021-04-24)
|
||||
|
||||
### New features
|
||||
|
||||
Support node types for class fields and private methods.
|
||||
|
||||
## 8.0.2 (2021-01-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||
|
||||
## 8.0.0 (2021-01-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where `full` and `fullAncestor` would skip nodes with overridden types.
|
||||
|
||||
## 8.0.0 (2020-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||
|
||||
## 7.2.0 (2020-06-17)
|
||||
|
||||
### New features
|
||||
|
||||
Support optional chaining and nullish coalescing.
|
||||
|
||||
Support `import.meta`.
|
||||
|
||||
Add support for `export * as ns from "source"`.
|
||||
|
||||
## 7.1.1 (2020-02-13)
|
||||
|
||||
### Bug fixes
|
||||
4
node_modules/ava/node_modules/acorn-walk/LICENSE → node_modules/acorn-walk/LICENSE
generated
vendored
4
node_modules/ava/node_modules/acorn-walk/LICENSE → node_modules/acorn-walk/LICENSE
generated
vendored
|
|
@ -1,4 +1,6 @@
|
|||
Copyright (C) 2012-2018 by various contributors (see AUTHORS)
|
||||
MIT License
|
||||
|
||||
Copyright (C) 2012-2020 by various contributors (see AUTHORS)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
@ -94,22 +94,21 @@ declare module "acorn-walk" {
|
|||
export function findNodeAt<TState>(
|
||||
node: Node,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
type: string,
|
||||
end?: number | undefined,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined;
|
||||
|
||||
export function findNodeAt<TState>(
|
||||
export function findNodeAround<TState>(
|
||||
node: Node,
|
||||
start: number | undefined,
|
||||
end: number | undefined,
|
||||
type?: FindPredicate,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined;
|
||||
|
||||
export const findNodeAround: typeof findNodeAt;
|
||||
export const findNodeAfter: typeof findNodeAround;
|
||||
|
||||
export const findNodeAfter: typeof findNodeAt;
|
||||
export const base: RecursiveVisitors<any>;
|
||||
}
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = global || self, factory((global.acorn = global.acorn || {}, global.acorn.walk = {})));
|
||||
}(this, function (exports) { 'use strict';
|
||||
}(this, (function (exports) { 'use strict';
|
||||
|
||||
// AST walker module for Mozilla Parser API compatible trees
|
||||
|
||||
|
|
@ -72,11 +72,15 @@
|
|||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st, type); }
|
||||
if (last !== node) {
|
||||
callback(node, st, type);
|
||||
last = node;
|
||||
}
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
|
|
@ -84,13 +88,16 @@
|
|||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = []
|
||||
var ancestors = [], last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st || ancestors, ancestors, type); }
|
||||
if (last !== node) {
|
||||
callback(node, st || ancestors, ancestors, type);
|
||||
last = node;
|
||||
}
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
|
@ -168,17 +175,10 @@
|
|||
return max
|
||||
}
|
||||
|
||||
// Fallback to an Object.create polyfill for older environments.
|
||||
var create = Object.create || function(proto) {
|
||||
function Ctor() {}
|
||||
Ctor.prototype = proto;
|
||||
return new Ctor
|
||||
};
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = create(baseVisitor || base);
|
||||
var visitor = Object.create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
|
@ -190,7 +190,7 @@
|
|||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = function (node, st, c) {
|
||||
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
|
@ -200,7 +200,7 @@
|
|||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression =
|
||||
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
|
|
@ -405,6 +405,8 @@
|
|||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
if (node.exported)
|
||||
{ c(node.exported, st); }
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
|
|
@ -419,7 +421,7 @@
|
|||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore;
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
|
|
@ -439,9 +441,9 @@
|
|||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.Property = function (node, st, c) {
|
||||
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
c(node.value, st, "Expression");
|
||||
if (node.value) { c(node.value, st, "Expression"); }
|
||||
};
|
||||
|
||||
exports.ancestor = ancestor;
|
||||
|
|
@ -458,4 +460,4 @@
|
|||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
})));
|
||||
|
|
@ -66,11 +66,15 @@ var Found = function Found(node, state) { this.node = node; this.state = state;
|
|||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st, type); }
|
||||
if (last !== node) {
|
||||
callback(node, st, type);
|
||||
last = node;
|
||||
}
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
|
|
@ -78,13 +82,16 @@ function full(node, callback, baseVisitor, state, override) {
|
|||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = []
|
||||
var ancestors = [], last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (!override) { callback(node, st || ancestors, ancestors, type); }
|
||||
if (last !== node) {
|
||||
callback(node, st || ancestors, ancestors, type);
|
||||
last = node;
|
||||
}
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
|
@ -162,17 +169,10 @@ function findNodeBefore(node, pos, test, baseVisitor, state) {
|
|||
return max
|
||||
}
|
||||
|
||||
// Fallback to an Object.create polyfill for older environments.
|
||||
var create = Object.create || function(proto) {
|
||||
function Ctor() {}
|
||||
Ctor.prototype = proto;
|
||||
return new Ctor
|
||||
};
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = create(baseVisitor || base);
|
||||
var visitor = Object.create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
|
@ -184,7 +184,7 @@ function ignore(_node, _st, _c) {}
|
|||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = function (node, st, c) {
|
||||
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
|
@ -194,7 +194,7 @@ base.Program = base.BlockStatement = function (node, st, c) {
|
|||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression =
|
||||
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
|
|
@ -399,6 +399,8 @@ base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st
|
|||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
if (node.exported)
|
||||
{ c(node.exported, st); }
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
|
|
@ -413,7 +415,7 @@ base.ImportDeclaration = function (node, st, c) {
|
|||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore;
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
|
|
@ -433,9 +435,9 @@ base.ClassBody = function (node, st, c) {
|
|||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.Property = function (node, st, c) {
|
||||
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
c(node.value, st, "Expression");
|
||||
if (node.value) { c(node.value, st, "Expression"); }
|
||||
};
|
||||
|
||||
export { ancestor, base, findNodeAfter, findNodeAround, findNodeAt, findNodeBefore, full, fullAncestor, make, recursive, simple };
|
||||
|
|
@ -5,7 +5,18 @@
|
|||
"main": "dist/walk.js",
|
||||
"types": "dist/walk.d.ts",
|
||||
"module": "dist/walk.mjs",
|
||||
"version": "7.1.1",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": "./dist/walk.mjs",
|
||||
"require": "./dist/walk.js",
|
||||
"default": "./dist/walk.js"
|
||||
},
|
||||
"./dist/walk.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"version": "8.2.0",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
9
node_modules/ansi-align/CHANGELOG.md
generated
vendored
9
node_modules/ansi-align/CHANGELOG.md
generated
vendored
|
|
@ -1,7 +1,14 @@
|
|||
# Change Log
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
### [3.0.1](https://github.com/nexdrew/ansi-align/compare/v3.0.0...v3.0.1) (2021-09-27)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **package:** update string-width to version 4.1.0 ([#52](https://github.com/nexdrew/ansi-align/issues/52)) ([ab5b733](https://github.com/nexdrew/ansi-align/commit/ab5b733b1c30eef87b75e15459f2216db28d7ed3))
|
||||
|
||||
<a name="3.0.0"></a>
|
||||
# [3.0.0](https://github.com/nexdrew/ansi-align/compare/v2.0.0...v3.0.0) (2018-12-17)
|
||||
|
||||
|
|
|
|||
73
node_modules/ansi-align/node_modules/emoji-regex/README.md
generated
vendored
73
node_modules/ansi-align/node_modules/emoji-regex/README.md
generated
vendored
|
|
@ -1,73 +0,0 @@
|
|||
# emoji-regex [](https://travis-ci.org/mathiasbynens/emoji-regex)
|
||||
|
||||
_emoji-regex_ offers a regular expression to match all emoji symbols (including textual representations of emoji) as per the Unicode Standard.
|
||||
|
||||
This repository contains a script that generates this regular expression based on [the data from Unicode Technical Report #51](https://github.com/mathiasbynens/unicode-tr51). Because of this, the regular expression can easily be updated whenever new emoji are added to the Unicode standard.
|
||||
|
||||
## Installation
|
||||
|
||||
Via [npm](https://www.npmjs.com/):
|
||||
|
||||
```bash
|
||||
npm install emoji-regex
|
||||
```
|
||||
|
||||
In [Node.js](https://nodejs.org/):
|
||||
|
||||
```js
|
||||
const emojiRegex = require('emoji-regex');
|
||||
// Note: because the regular expression has the global flag set, this module
|
||||
// exports a function that returns the regex rather than exporting the regular
|
||||
// expression itself, to make it impossible to (accidentally) mutate the
|
||||
// original regular expression.
|
||||
|
||||
const text = `
|
||||
\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation)
|
||||
\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji
|
||||
\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base)
|
||||
\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier
|
||||
`;
|
||||
|
||||
const regex = emojiRegex();
|
||||
let match;
|
||||
while (match = regex.exec(text)) {
|
||||
const emoji = match[0];
|
||||
console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`);
|
||||
}
|
||||
```
|
||||
|
||||
Console output:
|
||||
|
||||
```
|
||||
Matched sequence ⌚ — code points: 1
|
||||
Matched sequence ⌚ — code points: 1
|
||||
Matched sequence ↔️ — code points: 2
|
||||
Matched sequence ↔️ — code points: 2
|
||||
Matched sequence 👩 — code points: 1
|
||||
Matched sequence 👩 — code points: 1
|
||||
Matched sequence 👩🏿 — code points: 2
|
||||
Matched sequence 👩🏿 — code points: 2
|
||||
```
|
||||
|
||||
To match emoji in their textual representation as well (i.e. emoji that are not `Emoji_Presentation` symbols and that aren’t forced to render as emoji by a variation selector), `require` the other regex:
|
||||
|
||||
```js
|
||||
const emojiRegex = require('emoji-regex/text.js');
|
||||
```
|
||||
|
||||
Additionally, in environments which support ES2015 Unicode escapes, you may `require` ES2015-style versions of the regexes:
|
||||
|
||||
```js
|
||||
const emojiRegex = require('emoji-regex/es2015/index.js');
|
||||
const emojiRegexText = require('emoji-regex/es2015/text.js');
|
||||
```
|
||||
|
||||
## Author
|
||||
|
||||
| [](https://twitter.com/mathias "Follow @mathias on Twitter") |
|
||||
|---|
|
||||
| [Mathias Bynens](https://mathiasbynens.be/) |
|
||||
|
||||
## License
|
||||
|
||||
_emoji-regex_ is available under the [MIT](https://mths.be/mit) license.
|
||||
6
node_modules/ansi-align/node_modules/emoji-regex/es2015/index.js
generated
vendored
6
node_modules/ansi-align/node_modules/emoji-regex/es2015/index.js
generated
vendored
File diff suppressed because one or more lines are too long
6
node_modules/ansi-align/node_modules/emoji-regex/es2015/text.js
generated
vendored
6
node_modules/ansi-align/node_modules/emoji-regex/es2015/text.js
generated
vendored
File diff suppressed because one or more lines are too long
5
node_modules/ansi-align/node_modules/emoji-regex/index.d.ts
generated
vendored
5
node_modules/ansi-align/node_modules/emoji-regex/index.d.ts
generated
vendored
|
|
@ -1,5 +0,0 @@
|
|||
declare module 'emoji-regex' {
|
||||
function emojiRegex(): RegExp;
|
||||
|
||||
export default emojiRegex;
|
||||
}
|
||||
6
node_modules/ansi-align/node_modules/emoji-regex/index.js
generated
vendored
6
node_modules/ansi-align/node_modules/emoji-regex/index.js
generated
vendored
File diff suppressed because one or more lines are too long
51
node_modules/ansi-align/node_modules/emoji-regex/package.json
generated
vendored
51
node_modules/ansi-align/node_modules/emoji-regex/package.json
generated
vendored
|
|
@ -1,51 +0,0 @@
|
|||
{
|
||||
"name": "emoji-regex",
|
||||
"version": "7.0.3",
|
||||
"description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.",
|
||||
"homepage": "https://mths.be/emoji-regex",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"keywords": [
|
||||
"unicode",
|
||||
"regex",
|
||||
"regexp",
|
||||
"regular expressions",
|
||||
"code points",
|
||||
"symbols",
|
||||
"characters",
|
||||
"emoji"
|
||||
],
|
||||
"license": "MIT",
|
||||
"author": {
|
||||
"name": "Mathias Bynens",
|
||||
"url": "https://mathiasbynens.be/"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/mathiasbynens/emoji-regex.git"
|
||||
},
|
||||
"bugs": "https://github.com/mathiasbynens/emoji-regex/issues",
|
||||
"files": [
|
||||
"LICENSE-MIT.txt",
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"text.js",
|
||||
"es2015/index.js",
|
||||
"es2015/text.js"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src -d ./es2015; node script/inject-sequences.js",
|
||||
"test": "mocha",
|
||||
"test:watch": "npm run test -- --watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/plugin-proposal-unicode-property-regex": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"mocha": "^5.2.0",
|
||||
"regexgen": "^1.3.0",
|
||||
"unicode-11.0.0": "^0.7.7",
|
||||
"unicode-tr51": "^9.0.1"
|
||||
}
|
||||
}
|
||||
6
node_modules/ansi-align/node_modules/emoji-regex/text.js
generated
vendored
6
node_modules/ansi-align/node_modules/emoji-regex/text.js
generated
vendored
File diff suppressed because one or more lines are too long
46
node_modules/ansi-align/node_modules/is-fullwidth-code-point/index.js
generated
vendored
46
node_modules/ansi-align/node_modules/is-fullwidth-code-point/index.js
generated
vendored
|
|
@ -1,46 +0,0 @@
|
|||
'use strict';
|
||||
/* eslint-disable yoda */
|
||||
module.exports = x => {
|
||||
if (Number.isNaN(x)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// code points are derived from:
|
||||
// http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt
|
||||
if (
|
||||
x >= 0x1100 && (
|
||||
x <= 0x115f || // Hangul Jamo
|
||||
x === 0x2329 || // LEFT-POINTING ANGLE BRACKET
|
||||
x === 0x232a || // RIGHT-POINTING ANGLE BRACKET
|
||||
// CJK Radicals Supplement .. Enclosed CJK Letters and Months
|
||||
(0x2e80 <= x && x <= 0x3247 && x !== 0x303f) ||
|
||||
// Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A
|
||||
(0x3250 <= x && x <= 0x4dbf) ||
|
||||
// CJK Unified Ideographs .. Yi Radicals
|
||||
(0x4e00 <= x && x <= 0xa4c6) ||
|
||||
// Hangul Jamo Extended-A
|
||||
(0xa960 <= x && x <= 0xa97c) ||
|
||||
// Hangul Syllables
|
||||
(0xac00 <= x && x <= 0xd7a3) ||
|
||||
// CJK Compatibility Ideographs
|
||||
(0xf900 <= x && x <= 0xfaff) ||
|
||||
// Vertical Forms
|
||||
(0xfe10 <= x && x <= 0xfe19) ||
|
||||
// CJK Compatibility Forms .. Small Form Variants
|
||||
(0xfe30 <= x && x <= 0xfe6b) ||
|
||||
// Halfwidth and Fullwidth Forms
|
||||
(0xff01 <= x && x <= 0xff60) ||
|
||||
(0xffe0 <= x && x <= 0xffe6) ||
|
||||
// Kana Supplement
|
||||
(0x1b000 <= x && x <= 0x1b001) ||
|
||||
// Enclosed Ideographic Supplement
|
||||
(0x1f200 <= x && x <= 0x1f251) ||
|
||||
// CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane
|
||||
(0x20000 <= x && x <= 0x3fffd)
|
||||
)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
45
node_modules/ansi-align/node_modules/is-fullwidth-code-point/package.json
generated
vendored
45
node_modules/ansi-align/node_modules/is-fullwidth-code-point/package.json
generated
vendored
|
|
@ -1,45 +0,0 @@
|
|||
{
|
||||
"name": "is-fullwidth-code-point",
|
||||
"version": "2.0.0",
|
||||
"description": "Check if the character represented by a given Unicode code point is fullwidth",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/is-fullwidth-code-point",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"fullwidth",
|
||||
"full-width",
|
||||
"full",
|
||||
"width",
|
||||
"unicode",
|
||||
"character",
|
||||
"char",
|
||||
"string",
|
||||
"str",
|
||||
"codepoint",
|
||||
"code",
|
||||
"point",
|
||||
"is",
|
||||
"detect",
|
||||
"check"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "*",
|
||||
"xo": "*"
|
||||
},
|
||||
"xo": {
|
||||
"esnext": true
|
||||
}
|
||||
}
|
||||
39
node_modules/ansi-align/node_modules/is-fullwidth-code-point/readme.md
generated
vendored
39
node_modules/ansi-align/node_modules/is-fullwidth-code-point/readme.md
generated
vendored
|
|
@ -1,39 +0,0 @@
|
|||
# is-fullwidth-code-point [](https://travis-ci.org/sindresorhus/is-fullwidth-code-point)
|
||||
|
||||
> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save is-fullwidth-code-point
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const isFullwidthCodePoint = require('is-fullwidth-code-point');
|
||||
|
||||
isFullwidthCodePoint('谢'.codePointAt());
|
||||
//=> true
|
||||
|
||||
isFullwidthCodePoint('a'.codePointAt());
|
||||
//=> false
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### isFullwidthCodePoint(input)
|
||||
|
||||
#### input
|
||||
|
||||
Type: `number`
|
||||
|
||||
[Code point](https://en.wikipedia.org/wiki/Code_point) of a character.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
39
node_modules/ansi-align/node_modules/string-width/index.js
generated
vendored
39
node_modules/ansi-align/node_modules/string-width/index.js
generated
vendored
|
|
@ -1,39 +0,0 @@
|
|||
'use strict';
|
||||
const stripAnsi = require('strip-ansi');
|
||||
const isFullwidthCodePoint = require('is-fullwidth-code-point');
|
||||
const emojiRegex = require('emoji-regex')();
|
||||
|
||||
module.exports = input => {
|
||||
input = input.replace(emojiRegex, ' ');
|
||||
|
||||
if (typeof input !== 'string' || input.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
input = stripAnsi(input);
|
||||
|
||||
let width = 0;
|
||||
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const code = input.codePointAt(i);
|
||||
|
||||
// Ignore control characters
|
||||
if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Ignore combining characters
|
||||
if (code >= 0x300 && code <= 0x36F) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Surrogates
|
||||
if (code > 0xFFFF) {
|
||||
i++;
|
||||
}
|
||||
|
||||
width += isFullwidthCodePoint(code) ? 2 : 1;
|
||||
}
|
||||
|
||||
return width;
|
||||
};
|
||||
56
node_modules/ansi-align/node_modules/string-width/package.json
generated
vendored
56
node_modules/ansi-align/node_modules/string-width/package.json
generated
vendored
|
|
@ -1,56 +0,0 @@
|
|||
{
|
||||
"name": "string-width",
|
||||
"version": "3.1.0",
|
||||
"description": "Get the visual width of a string - the number of columns required to display it",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/string-width",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"string",
|
||||
"str",
|
||||
"character",
|
||||
"char",
|
||||
"unicode",
|
||||
"width",
|
||||
"visual",
|
||||
"column",
|
||||
"columns",
|
||||
"fullwidth",
|
||||
"full-width",
|
||||
"full",
|
||||
"ansi",
|
||||
"escape",
|
||||
"codes",
|
||||
"cli",
|
||||
"command-line",
|
||||
"terminal",
|
||||
"console",
|
||||
"cjk",
|
||||
"chinese",
|
||||
"japanese",
|
||||
"korean",
|
||||
"fixed-width"
|
||||
],
|
||||
"dependencies": {
|
||||
"emoji-regex": "^7.0.1",
|
||||
"is-fullwidth-code-point": "^2.0.0",
|
||||
"strip-ansi": "^5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ava": "^1.0.1",
|
||||
"xo": "^0.23.0"
|
||||
}
|
||||
}
|
||||
45
node_modules/ansi-align/node_modules/string-width/readme.md
generated
vendored
45
node_modules/ansi-align/node_modules/string-width/readme.md
generated
vendored
|
|
@ -1,45 +0,0 @@
|
|||
# string-width [](https://travis-ci.org/sindresorhus/string-width)
|
||||
|
||||
> Get the visual width of a string - the number of columns required to display it
|
||||
|
||||
Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width.
|
||||
|
||||
Useful to be able to measure the actual width of command-line output.
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install string-width
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const stringWidth = require('string-width');
|
||||
|
||||
stringWidth('古');
|
||||
//=> 2
|
||||
|
||||
stringWidth('\u001b[1m古\u001b[22m');
|
||||
//=> 2
|
||||
|
||||
stringWidth('a');
|
||||
//=> 1
|
||||
|
||||
stringWidth('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007');
|
||||
// => 5
|
||||
```
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module
|
||||
- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string
|
||||
- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
15
node_modules/ansi-align/node_modules/strip-ansi/index.d.ts
generated
vendored
15
node_modules/ansi-align/node_modules/strip-ansi/index.d.ts
generated
vendored
|
|
@ -1,15 +0,0 @@
|
|||
/**
|
||||
Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string.
|
||||
|
||||
@example
|
||||
```
|
||||
import stripAnsi from 'strip-ansi';
|
||||
|
||||
stripAnsi('\u001B[4mUnicorn\u001B[0m');
|
||||
//=> 'Unicorn'
|
||||
|
||||
stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007');
|
||||
//=> 'Click'
|
||||
```
|
||||
*/
|
||||
export default function stripAnsi(string: string): string;
|
||||
7
node_modules/ansi-align/node_modules/strip-ansi/index.js
generated
vendored
7
node_modules/ansi-align/node_modules/strip-ansi/index.js
generated
vendored
|
|
@ -1,7 +0,0 @@
|
|||
'use strict';
|
||||
const ansiRegex = require('ansi-regex');
|
||||
|
||||
const stripAnsi = string => typeof string === 'string' ? string.replace(ansiRegex(), '') : string;
|
||||
|
||||
module.exports = stripAnsi;
|
||||
module.exports.default = stripAnsi;
|
||||
61
node_modules/ansi-align/node_modules/strip-ansi/readme.md
generated
vendored
61
node_modules/ansi-align/node_modules/strip-ansi/readme.md
generated
vendored
|
|
@ -1,61 +0,0 @@
|
|||
# strip-ansi [](https://travis-ci.org/chalk/strip-ansi)
|
||||
|
||||
> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-strip-ansi?utm_source=npm-strip-ansi&utm_medium=referral&utm_campaign=readme">Get professional support for 'strip-ansi' with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install strip-ansi
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const stripAnsi = require('strip-ansi');
|
||||
|
||||
stripAnsi('\u001B[4mUnicorn\u001B[0m');
|
||||
//=> 'Unicorn'
|
||||
|
||||
stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007');
|
||||
//=> 'Click'
|
||||
```
|
||||
|
||||
|
||||
## Security
|
||||
|
||||
To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure.
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module
|
||||
- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Streaming version of this module
|
||||
- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes
|
||||
- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes
|
||||
- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right
|
||||
|
||||
|
||||
## Maintainers
|
||||
|
||||
- [Sindre Sorhus](https://github.com/sindresorhus)
|
||||
- [Josh Junon](https://github.com/qix-)
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
16
node_modules/ansi-align/package.json
generated
vendored
16
node_modules/ansi-align/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "ansi-align",
|
||||
"version": "3.0.0",
|
||||
"version": "3.0.1",
|
||||
"description": "align-text with ANSI support for CLIs",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
|
|
@ -30,14 +30,14 @@
|
|||
},
|
||||
"homepage": "https://github.com/nexdrew/ansi-align#readme",
|
||||
"dependencies": {
|
||||
"string-width": "^3.0.0"
|
||||
"string-width": "^4.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ava": "^1.0.1",
|
||||
"chalk": "^2.4.1",
|
||||
"coveralls": "^3.0.2",
|
||||
"nyc": "^13.1.0",
|
||||
"standard": "^12.0.1",
|
||||
"standard-version": "^4.4.0"
|
||||
"ava": "^2.0.0",
|
||||
"chalk": "^2.4.2",
|
||||
"coveralls": "^3.0.3",
|
||||
"nyc": "^14.0.0",
|
||||
"standard": "^14.0.0",
|
||||
"standard-version": "^7.0.0"
|
||||
}
|
||||
}
|
||||
10
node_modules/ansi-regex/index.js
generated
vendored
10
node_modules/ansi-regex/index.js
generated
vendored
|
|
@ -1,14 +1,10 @@
|
|||
'use strict';
|
||||
|
||||
module.exports = options => {
|
||||
options = Object.assign({
|
||||
onlyFirst: false
|
||||
}, options);
|
||||
|
||||
module.exports = ({onlyFirst = false} = {}) => {
|
||||
const pattern = [
|
||||
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
|
||||
'[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
|
||||
'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
|
||||
].join('|');
|
||||
|
||||
return new RegExp(pattern, options.onlyFirst ? undefined : 'g');
|
||||
return new RegExp(pattern, onlyFirst ? undefined : 'g');
|
||||
};
|
||||
|
|
|
|||
14
node_modules/ansi-regex/package.json
generated
vendored
14
node_modules/ansi-regex/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "ansi-regex",
|
||||
"version": "4.1.0",
|
||||
"version": "5.0.1",
|
||||
"description": "Regular expression for matching ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/ansi-regex",
|
||||
|
|
@ -10,14 +10,15 @@
|
|||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava",
|
||||
"test": "xo && ava && tsd",
|
||||
"view-supported": "node fixtures/view-codes.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
|
|
@ -47,7 +48,8 @@
|
|||
"pattern"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^0.25.0",
|
||||
"xo": "^0.23.0"
|
||||
"ava": "^2.4.0",
|
||||
"tsd": "^0.9.0",
|
||||
"xo": "^0.25.3"
|
||||
}
|
||||
}
|
||||
37
node_modules/ansi-regex/readme.md
generated
vendored
37
node_modules/ansi-regex/readme.md
generated
vendored
|
|
@ -1,21 +1,7 @@
|
|||
# ansi-regex [](https://travis-ci.org/chalk/ansi-regex)
|
||||
# ansi-regex
|
||||
|
||||
> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-ansi-regex?utm_source=npm-ansi-regex&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
|
|
@ -48,12 +34,14 @@ ansiRegex().test('cake');
|
|||
|
||||
## API
|
||||
|
||||
### ansiRegex([options])
|
||||
### ansiRegex(options?)
|
||||
|
||||
Returns a regex for matching ANSI escape codes.
|
||||
|
||||
#### options
|
||||
|
||||
Type: `object`
|
||||
|
||||
##### onlyFirst
|
||||
|
||||
Type: `boolean`<br>
|
||||
|
|
@ -71,17 +59,20 @@ Some of the codes we run as a test are codes that we acquired finding various li
|
|||
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
|
||||
|
||||
|
||||
## Security
|
||||
|
||||
To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure.
|
||||
|
||||
|
||||
## Maintainers
|
||||
|
||||
- [Sindre Sorhus](https://github.com/sindresorhus)
|
||||
- [Josh Junon](https://github.com/qix-)
|
||||
|
||||
|
||||
## License
|
||||
---
|
||||
|
||||
MIT
|
||||
<div align="center">
|
||||
<b>
|
||||
<a href="https://tidelift.com/subscription/pkg/npm-ansi-regex?utm_source=npm-ansi-regex&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
|
||||
</b>
|
||||
<br>
|
||||
<sub>
|
||||
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
|
||||
</sub>
|
||||
</div>
|
||||
|
|
|
|||
0
node_modules/chokidar/node_modules/anymatch/LICENSE → node_modules/anymatch/LICENSE
generated
vendored
0
node_modules/chokidar/node_modules/anymatch/LICENSE → node_modules/anymatch/LICENSE
generated
vendored
|
|
@ -86,7 +86,9 @@ const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => {
|
|||
.filter(item => typeof item === 'string' && item.charAt(0) === BANG)
|
||||
.map(item => item.slice(1))
|
||||
.map(item => picomatch(item, opts));
|
||||
const patterns = mtchers.map(matcher => createPattern(matcher, opts));
|
||||
const patterns = mtchers
|
||||
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG))
|
||||
.map(matcher => createPattern(matcher, opts));
|
||||
|
||||
if (testString == null) {
|
||||
return (testString, ri = false) => {
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "anymatch",
|
||||
"version": "3.1.1",
|
||||
"version": "3.1.2",
|
||||
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions",
|
||||
"files": [
|
||||
"index.js",
|
||||
165
node_modules/ava/eslint-plugin-helper.js
generated
vendored
165
node_modules/ava/eslint-plugin-helper.js
generated
vendored
|
|
@ -1,26 +1,25 @@
|
|||
'use strict';
|
||||
const normalizeExtensions = require('./lib/extensions');
|
||||
let isMainThread = true;
|
||||
let supportsWorkers = false;
|
||||
try {
|
||||
({isMainThread} = require('worker_threads'));
|
||||
supportsWorkers = true;
|
||||
} catch {}
|
||||
|
||||
const {classify, hasExtension, isHelperish, matches, normalizeFileForMatching, normalizeGlobs, normalizePatterns} = require('./lib/globs');
|
||||
const loadConfig = require('./lib/load-config');
|
||||
const providerManager = require('./lib/provider-manager');
|
||||
|
||||
const configCache = new Map();
|
||||
const helperCache = new Map();
|
||||
let resolveGlobs;
|
||||
let resolveGlobsSync;
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
if (!supportsWorkers || !isMainThread) {
|
||||
const normalizeExtensions = require('./lib/extensions');
|
||||
const {loadConfig, loadConfigSync} = require('./lib/load-config');
|
||||
const providerManager = require('./lib/provider-manager');
|
||||
|
||||
let conf;
|
||||
let providers;
|
||||
if (configCache.has(projectDir)) {
|
||||
({conf, providers} = configCache.get(projectDir));
|
||||
} else {
|
||||
conf = loadConfig({resolveFrom: projectDir});
|
||||
const configCache = new Map();
|
||||
|
||||
providers = [];
|
||||
const collectProviders = ({conf, projectDir}) => {
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
const {level, main} = providerManager.babel(projectDir);
|
||||
providers.push({
|
||||
|
|
@ -39,12 +38,125 @@ function load(projectDir, overrides) {
|
|||
});
|
||||
}
|
||||
|
||||
configCache.set(projectDir, {conf, providers});
|
||||
}
|
||||
return providers;
|
||||
};
|
||||
|
||||
const extensions = overrides && overrides.extensions ?
|
||||
normalizeExtensions(overrides.extensions) :
|
||||
normalizeExtensions(conf.extensions, providers);
|
||||
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
|
||||
const extensions = overrideExtensions ?
|
||||
normalizeExtensions(overrideExtensions) :
|
||||
normalizeExtensions(conf.extensions, providers);
|
||||
|
||||
return {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrideFiles ? overrideFiles : conf.files,
|
||||
providers
|
||||
})
|
||||
};
|
||||
};
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
const conf = loadConfigSync({resolveFrom: projectDir});
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
configCache.set(projectDir, {conf, providers});
|
||||
}
|
||||
|
||||
const {conf, providers} = configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
|
||||
resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(conf => { // eslint-disable-line promise/prefer-await-to-then
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
return {conf, providers};
|
||||
}));
|
||||
}
|
||||
|
||||
const {conf, providers} = await configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
}
|
||||
|
||||
if (supportsWorkers) {
|
||||
const v8 = require('v8');
|
||||
|
||||
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
|
||||
|
||||
if (isMainThread) {
|
||||
const {Worker} = require('worker_threads');
|
||||
let data;
|
||||
let sync;
|
||||
let worker;
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (worker === undefined) {
|
||||
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
|
||||
data = new Uint8Array(dataBuffer);
|
||||
|
||||
const syncBuffer = new SharedArrayBuffer(4);
|
||||
sync = new Int32Array(syncBuffer);
|
||||
|
||||
worker = new Worker(__filename, {
|
||||
workerData: {
|
||||
dataBuffer,
|
||||
syncBuffer,
|
||||
firstMessage: {projectDir, overrideExtensions, overrideFiles}
|
||||
}
|
||||
});
|
||||
worker.unref();
|
||||
} else {
|
||||
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
|
||||
}
|
||||
|
||||
Atomics.wait(sync, 0, 0);
|
||||
|
||||
const byteLength = Atomics.exchange(sync, 0, 0);
|
||||
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
|
||||
throw new Error('Globs are over 100 KiB and cannot be resolved');
|
||||
}
|
||||
|
||||
const globsOrError = v8.deserialize(data.slice(0, byteLength));
|
||||
if (globsOrError instanceof Error) {
|
||||
throw globsOrError;
|
||||
}
|
||||
|
||||
return globsOrError;
|
||||
};
|
||||
} else {
|
||||
const {parentPort, workerData} = require('worker_threads');
|
||||
const data = new Uint8Array(workerData.dataBuffer);
|
||||
const sync = new Int32Array(workerData.syncBuffer);
|
||||
|
||||
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
|
||||
let encoded;
|
||||
try {
|
||||
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
|
||||
encoded = v8.serialize(globs);
|
||||
} catch (error) {
|
||||
encoded = v8.serialize(error);
|
||||
}
|
||||
|
||||
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
|
||||
Atomics.store(sync, 0, byteLength);
|
||||
Atomics.notify(sync, 0);
|
||||
};
|
||||
|
||||
parentPort.on('message', handleMessage);
|
||||
handleMessage(workerData.firstMessage);
|
||||
delete workerData.firstMessage;
|
||||
}
|
||||
}
|
||||
|
||||
const helperCache = new Map();
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
|
||||
let helperPatterns = [];
|
||||
if (overrides && overrides.helpers !== undefined) {
|
||||
|
|
@ -55,14 +167,7 @@ function load(projectDir, overrides) {
|
|||
helperPatterns = normalizePatterns(overrides.helpers);
|
||||
}
|
||||
|
||||
const globs = {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrides && overrides.files ? overrides.files : conf.files,
|
||||
providers
|
||||
})
|
||||
};
|
||||
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
|
||||
|
||||
const classifyForESLint = file => {
|
||||
const {isTest} = classify(file, globs);
|
||||
|
|
|
|||
13
node_modules/ava/index.d.ts
generated
vendored
13
node_modules/ava/index.d.ts
generated
vendored
|
|
@ -45,6 +45,9 @@ export interface Assertions {
|
|||
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
deepEqual: DeepEqualAssertion;
|
||||
|
||||
/** Assert that `actual` is like `expected`. */
|
||||
like: LikeAssertion;
|
||||
|
||||
/** Fail the test. */
|
||||
fail: FailAssertion;
|
||||
|
||||
|
|
@ -125,6 +128,14 @@ export interface DeepEqualAssertion {
|
|||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface LikeAssertion {
|
||||
/** Assert that `value` is like `selector`. */
|
||||
(value: any, selector: Record<string, any>, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(value: any, selector: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface FailAssertion {
|
||||
/** Fail the test. */
|
||||
(message?: string): void;
|
||||
|
|
@ -342,7 +353,7 @@ export interface TimeoutFn {
|
|||
* Set a timeout for the test, in milliseconds. The test will fail if the timeout is exceeded.
|
||||
* The timeout is reset each time an assertion is made.
|
||||
*/
|
||||
(ms: number): void;
|
||||
(ms: number, message?: string): void;
|
||||
}
|
||||
|
||||
export interface TeardownFn {
|
||||
|
|
|
|||
29
node_modules/ava/lib/api.js
generated
vendored
29
node_modules/ava/lib/api.js
generated
vendored
|
|
@ -17,6 +17,7 @@ const RunStatus = require('./run-status');
|
|||
const fork = require('./fork');
|
||||
const serializeError = require('./serialize-error');
|
||||
const {getApplicableLineNumbers} = require('./line-numbers');
|
||||
const sharedWorkers = require('./plugin-support/shared-workers');
|
||||
|
||||
function resolveModules(modules) {
|
||||
return arrify(modules).map(name => {
|
||||
|
|
@ -110,21 +111,15 @@ class Api extends Emittery {
|
|||
}
|
||||
};
|
||||
|
||||
let cacheDir;
|
||||
let testFiles;
|
||||
try {
|
||||
cacheDir = this._createCacheDir();
|
||||
testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
|
||||
if (selectedFiles.length === 0) {
|
||||
if (filter.length === 0) {
|
||||
selectedFiles = testFiles;
|
||||
} else {
|
||||
selectedFiles = globs.applyTestFileFilter({
|
||||
cwd: this.options.projectDir,
|
||||
filter: filter.map(({pattern}) => pattern),
|
||||
testFiles
|
||||
});
|
||||
}
|
||||
selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
|
||||
cwd: this.options.projectDir,
|
||||
filter: filter.map(({pattern}) => pattern),
|
||||
testFiles
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
selectedFiles = [];
|
||||
|
|
@ -147,7 +142,7 @@ class Api extends Emittery {
|
|||
runStatus = new RunStatus(selectedFiles.length, null);
|
||||
}
|
||||
|
||||
const debugWithoutSpecificFile = Boolean(this.options.debug) && selectedFiles.length !== 1;
|
||||
const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
|
||||
|
||||
await this.emit('run', {
|
||||
bailWithoutReporting: debugWithoutSpecificFile,
|
||||
|
|
@ -192,7 +187,7 @@ class Api extends Emittery {
|
|||
|
||||
const {providers = []} = this.options;
|
||||
const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
|
||||
const state = await main.compile({cacheDir, files: testFiles});
|
||||
const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
|
||||
return state === null ? null : {type, state};
|
||||
}))).filter(state => state !== null);
|
||||
|
||||
|
|
@ -206,6 +201,8 @@ class Api extends Emittery {
|
|||
concurrency = 1;
|
||||
}
|
||||
|
||||
const deregisteredSharedWorkers = [];
|
||||
|
||||
// Try and run each file, limited by `concurrency`.
|
||||
await pMap(selectedFiles, async file => {
|
||||
// No new files should be run once a test has timed out or failed,
|
||||
|
|
@ -231,6 +228,7 @@ class Api extends Emittery {
|
|||
|
||||
const worker = fork(file, options, apiOptions.nodeArguments);
|
||||
runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
|
||||
deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
|
||||
|
||||
pendingWorkers.add(worker);
|
||||
worker.promise.then(() => {
|
||||
|
|
@ -238,8 +236,11 @@ class Api extends Emittery {
|
|||
});
|
||||
restartTimer();
|
||||
|
||||
return worker.promise;
|
||||
await worker.promise;
|
||||
}, {concurrency, stopOnError: false});
|
||||
|
||||
// Allow shared workers to clean up before the run ends.
|
||||
await Promise.all(deregisteredSharedWorkers);
|
||||
} catch (error) {
|
||||
if (error && error.name === 'AggregateError') {
|
||||
for (const err of error) {
|
||||
|
|
|
|||
108
node_modules/ava/lib/assert.js
generated
vendored
108
node_modules/ava/lib/assert.js
generated
vendored
|
|
@ -3,11 +3,11 @@ const concordance = require('concordance');
|
|||
const isError = require('is-error');
|
||||
const isPromise = require('is-promise');
|
||||
const concordanceOptions = require('./concordance-options').default;
|
||||
const concordanceDiffOptions = require('./concordance-options').diff;
|
||||
const {CIRCULAR_SELECTOR, isLikeSelector, selectComparable} = require('./like-selector');
|
||||
const snapshotManager = require('./snapshot-manager');
|
||||
|
||||
function formatDescriptorDiff(actualDescriptor, expectedDescriptor, options) {
|
||||
options = {...options, ...concordanceDiffOptions};
|
||||
options = {...options, ...concordanceOptions};
|
||||
return {
|
||||
label: 'Difference:',
|
||||
formatted: concordance.diffDescriptors(actualDescriptor, expectedDescriptor, options)
|
||||
|
|
@ -64,6 +64,21 @@ class AssertionError extends Error {
|
|||
}
|
||||
exports.AssertionError = AssertionError;
|
||||
|
||||
function checkAssertionMessage(assertion, message) {
|
||||
if (typeof message === 'undefined' || typeof message === 'string') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return new AssertionError({
|
||||
assertion,
|
||||
improperUsage: true,
|
||||
message: 'The assertion message must be a string',
|
||||
values: [formatWithLabel('Called with:', message)]
|
||||
});
|
||||
}
|
||||
|
||||
exports.checkAssertionMessage = checkAssertionMessage;
|
||||
|
||||
function getErrorWithLongStackTrace() {
|
||||
const limitBefore = Error.stackTraceLimit;
|
||||
Error.stackTraceLimit = Infinity;
|
||||
|
|
@ -72,8 +87,16 @@ function getErrorWithLongStackTrace() {
|
|||
return err;
|
||||
}
|
||||
|
||||
function validateExpectations(assertion, expectations, numberArgs) { // eslint-disable-line complexity
|
||||
function validateExpectations(assertion, expectations, numberArgs, experiments) { // eslint-disable-line complexity
|
||||
if (numberArgs === 1 || expectations === null || expectations === undefined) {
|
||||
if (experiments.disableNullExpectations && expectations === null) {
|
||||
throw new AssertionError({
|
||||
assertion,
|
||||
message: `The second argument to \`t.${assertion}()\` must be an expectation object or \`undefined\``,
|
||||
values: [formatWithLabel('Called with:', expectations)]
|
||||
});
|
||||
}
|
||||
|
||||
expectations = {};
|
||||
} else if (
|
||||
typeof expectations === 'function' ||
|
||||
|
|
@ -242,7 +265,9 @@ class Assertions {
|
|||
fail = notImplemented,
|
||||
skip = notImplemented,
|
||||
compareWithSnapshot = notImplemented,
|
||||
powerAssert
|
||||
powerAssert,
|
||||
experiments = {},
|
||||
disableSnapshots = false
|
||||
} = {}) {
|
||||
const withSkip = assertionFn => {
|
||||
assertionFn.skip = skip;
|
||||
|
|
@ -267,22 +292,16 @@ class Assertions {
|
|||
});
|
||||
|
||||
const checkMessage = (assertion, message, powerAssert = false) => {
|
||||
if (typeof message === 'undefined' || typeof message === 'string') {
|
||||
return true;
|
||||
const result = checkAssertionMessage(assertion, message);
|
||||
if (result === true) {
|
||||
return this.true;
|
||||
}
|
||||
|
||||
const error = new AssertionError({
|
||||
assertion,
|
||||
improperUsage: true,
|
||||
message: 'The assertion message must be a string',
|
||||
values: [formatWithLabel('Called with:', message)]
|
||||
});
|
||||
|
||||
if (powerAssert) {
|
||||
throw error;
|
||||
throw result;
|
||||
}
|
||||
|
||||
fail(error);
|
||||
fail(result);
|
||||
return false;
|
||||
};
|
||||
|
||||
|
|
@ -387,6 +406,52 @@ class Assertions {
|
|||
}
|
||||
});
|
||||
|
||||
this.like = withSkip((actual, selector, message) => {
|
||||
if (!checkMessage('like', message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isLikeSelector(selector)) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
improperUsage: true,
|
||||
message: '`t.like()` selector must be a non-empty object',
|
||||
values: [formatWithLabel('Called with:', selector)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
let comparable;
|
||||
try {
|
||||
comparable = selectComparable(actual, selector);
|
||||
} catch (error) {
|
||||
if (error === CIRCULAR_SELECTOR) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
improperUsage: true,
|
||||
message: '`t.like()` selector must not contain circular references',
|
||||
values: [formatWithLabel('Called with:', selector)]
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
const result = concordance.compare(comparable, selector, concordanceOptions);
|
||||
if (result.pass) {
|
||||
pass();
|
||||
} else {
|
||||
const actualDescriptor = result.actual || concordance.describe(comparable, concordanceOptions);
|
||||
const expectedDescriptor = result.expected || concordance.describe(selector, concordanceOptions);
|
||||
fail(new AssertionError({
|
||||
assertion: 'like',
|
||||
message,
|
||||
values: [formatDescriptorDiff(actualDescriptor, expectedDescriptor)]
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
this.throws = withSkip((...args) => {
|
||||
// Since arrow functions do not support 'arguments', we are using rest
|
||||
// operator, so we can determine the total number of arguments passed
|
||||
|
|
@ -408,7 +473,7 @@ class Assertions {
|
|||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throws', expectations, args.length);
|
||||
expectations = validateExpectations('throws', expectations, args.length, experiments);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return;
|
||||
|
|
@ -474,7 +539,7 @@ class Assertions {
|
|||
}
|
||||
|
||||
try {
|
||||
expectations = validateExpectations('throwsAsync', expectations, args.length);
|
||||
expectations = validateExpectations('throwsAsync', expectations, args.length, experiments);
|
||||
} catch (error) {
|
||||
fail(error);
|
||||
return Promise.resolve();
|
||||
|
|
@ -634,6 +699,15 @@ class Assertions {
|
|||
});
|
||||
|
||||
this.snapshot = withSkip((expected, ...rest) => {
|
||||
if (disableSnapshots && experiments.disableSnapshotsInHooks) {
|
||||
fail(new AssertionError({
|
||||
assertion: 'snapshot',
|
||||
message: '`t.snapshot()` can only be used in tests',
|
||||
improperUsage: true
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
let message;
|
||||
let snapshotOptions;
|
||||
if (rest.length > 1) {
|
||||
|
|
|
|||
102
node_modules/ava/lib/cli.js
generated
vendored
102
node_modules/ava/lib/cli.js
generated
vendored
|
|
@ -7,7 +7,7 @@ const arrify = require('arrify');
|
|||
const yargs = require('yargs');
|
||||
const readPkg = require('read-pkg');
|
||||
const isCi = require('./is-ci');
|
||||
const loadConfig = require('./load-config');
|
||||
const {loadConfig} = require('./load-config');
|
||||
|
||||
function exit(message) {
|
||||
console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
|
||||
|
|
@ -83,12 +83,24 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
let confError = null;
|
||||
try {
|
||||
const {argv: {config: configFile}} = yargs.help(false);
|
||||
conf = loadConfig({configFile});
|
||||
conf = await loadConfig({configFile});
|
||||
} catch (error) {
|
||||
confError = error;
|
||||
}
|
||||
|
||||
let debug = null;
|
||||
// Enter debug mode if the main process is being inspected. This assumes the
|
||||
// worker processes are automatically inspected, too. It is not necessary to
|
||||
// run AVA with the debug command, though it's allowed.
|
||||
const activeInspector = require('inspector').url() !== undefined; // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
let debug = activeInspector ?
|
||||
{
|
||||
active: true,
|
||||
break: false,
|
||||
files: [],
|
||||
host: undefined,
|
||||
port: undefined
|
||||
} : null;
|
||||
|
||||
let resetCache = false;
|
||||
const {argv} = yargs
|
||||
.parserConfiguration({
|
||||
|
|
@ -122,7 +134,11 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
array: true,
|
||||
describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
|
||||
type: 'string'
|
||||
}))
|
||||
}), argv => {
|
||||
if (activeInspector) {
|
||||
debug.files = argv.pattern || [];
|
||||
}
|
||||
})
|
||||
.command(
|
||||
'debug [<pattern>...]',
|
||||
'Activate Node.js inspector and run a single test file',
|
||||
|
|
@ -148,6 +164,7 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
}),
|
||||
argv => {
|
||||
debug = {
|
||||
active: activeInspector,
|
||||
break: argv.break === true,
|
||||
files: argv.pattern,
|
||||
host: argv.host,
|
||||
|
|
@ -182,6 +199,10 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
|
||||
const chalk = require('./chalk').set(chalkOptions);
|
||||
|
||||
if (combined.updateSnapshots && combined.match) {
|
||||
exit('Snapshots cannot be updated when matching specific tests.');
|
||||
}
|
||||
|
||||
if (confError) {
|
||||
if (confError.parent) {
|
||||
exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
|
||||
|
|
@ -259,11 +280,11 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
|
||||
const ciParallelVars = require('ci-parallel-vars');
|
||||
const Api = require('./api');
|
||||
const VerboseReporter = require('./reporters/verbose');
|
||||
const MiniReporter = require('./reporters/mini');
|
||||
const DefaultReporter = require('./reporters/default');
|
||||
const TapReporter = require('./reporters/tap');
|
||||
const Watcher = require('./watcher');
|
||||
const normalizeExtensions = require('./extensions');
|
||||
const normalizeModuleTypes = require('./module-types');
|
||||
const {normalizeGlobs, normalizePattern} = require('./globs');
|
||||
const normalizeNodeArguments = require('./node-arguments');
|
||||
const validateEnvironmentVariables = require('./environment-variables');
|
||||
|
|
@ -281,12 +302,6 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
|
||||
const {type: defaultModuleType = 'commonjs'} = pkg || {};
|
||||
|
||||
const moduleTypes = {
|
||||
cjs: 'commonjs',
|
||||
mjs: 'module',
|
||||
js: defaultModuleType
|
||||
};
|
||||
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
try {
|
||||
|
|
@ -328,6 +343,13 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
exit(error.message);
|
||||
}
|
||||
|
||||
let moduleTypes;
|
||||
try {
|
||||
moduleTypes = normalizeModuleTypes(conf.extensions, defaultModuleType, experiments);
|
||||
} catch (error) {
|
||||
exit(error.message);
|
||||
}
|
||||
|
||||
let globs;
|
||||
try {
|
||||
globs = normalizeGlobs({files: conf.files, ignoredByWatcher: conf.ignoredByWatcher, extensions, providers});
|
||||
|
|
@ -357,6 +379,9 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
|
||||
...rest
|
||||
}));
|
||||
if (combined.updateSnapshots && filter.some(condition => condition.lineNumbers !== null)) {
|
||||
exit('Snapshots cannot be updated when selecting specific tests by their line number.');
|
||||
}
|
||||
|
||||
const api = new Api({
|
||||
cacheEnabled: combined.cache !== false,
|
||||
|
|
@ -384,32 +409,37 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
workerArgv: argv['--']
|
||||
});
|
||||
|
||||
let reporter;
|
||||
if (combined.tap && !combined.watch && debug === null) {
|
||||
reporter = new TapReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr
|
||||
});
|
||||
} else if (debug !== null || combined.verbose || isCi || !process.stdout.isTTY) {
|
||||
reporter = new VerboseReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch
|
||||
});
|
||||
} else {
|
||||
reporter = new MiniReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch
|
||||
});
|
||||
}
|
||||
const reporter = combined.tap && !combined.watch && debug === null ? new TapReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr
|
||||
}) : new DefaultReporter({
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch,
|
||||
verbose: debug !== null || combined.verbose || isCi || !process.stdout.isTTY
|
||||
});
|
||||
|
||||
api.on('run', plan => {
|
||||
reporter.startRun(plan);
|
||||
|
||||
if (process.env.AVA_EMIT_RUN_STATUS_OVER_IPC === 'I\'ll find a payphone baby / Take some time to talk to you') {
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
const bufferedSend = controlFlow(process);
|
||||
|
||||
if (process.versions.node >= '12.16.0') {
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend(evt);
|
||||
});
|
||||
} else {
|
||||
const v8 = require('v8');
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend([...v8.serialize(evt)]);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
plan.status.on('stateChange', evt => {
|
||||
if (evt.type === 'interrupt') {
|
||||
reporter.endRun();
|
||||
|
|
@ -431,14 +461,14 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
} else {
|
||||
let debugWithoutSpecificFile = false;
|
||||
api.on('run', plan => {
|
||||
if (plan.debug && plan.files.length !== 1) {
|
||||
if (debug !== null && plan.files.length !== 1) {
|
||||
debugWithoutSpecificFile = true;
|
||||
}
|
||||
});
|
||||
|
||||
const runStatus = await api.run({filter});
|
||||
|
||||
if (debugWithoutSpecificFile) {
|
||||
if (debugWithoutSpecificFile && !debug.active) {
|
||||
exit('Provide the path to the test file you wish to debug');
|
||||
return;
|
||||
}
|
||||
|
|
|
|||
2
node_modules/ava/lib/code-excerpt.js
generated
vendored
2
node_modules/ava/lib/code-excerpt.js
generated
vendored
|
|
@ -19,7 +19,7 @@ module.exports = (source, options = {}) => {
|
|||
let contents;
|
||||
try {
|
||||
contents = fs.readFileSync(file, 'utf8');
|
||||
} catch (_) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
|||
3
node_modules/ava/lib/concordance-options.js
generated
vendored
3
node_modules/ava/lib/concordance-options.js
generated
vendored
|
|
@ -1,5 +1,5 @@
|
|||
'use strict';
|
||||
const util = require('util');
|
||||
const util = require('util'); // eslint-disable-line unicorn/import-style
|
||||
const ansiStyles = require('ansi-styles');
|
||||
const stripAnsi = require('strip-ansi');
|
||||
const cloneDeepWith = require('lodash/cloneDeepWith');
|
||||
|
|
@ -135,5 +135,4 @@ exports.default = {
|
|||
theme
|
||||
};
|
||||
|
||||
exports.diff = {maxDepth: 1, plugins, theme};
|
||||
exports.snapshotManager = {plugins, theme: plainTheme};
|
||||
|
|
|
|||
5
node_modules/ava/lib/extensions.js
generated
vendored
5
node_modules/ava/lib/extensions.js
generated
vendored
|
|
@ -2,8 +2,11 @@ module.exports = (configuredExtensions, providers = []) => {
|
|||
// Combine all extensions possible for testing. Remove duplicate extensions.
|
||||
const duplicates = new Set();
|
||||
const seen = new Set();
|
||||
|
||||
const normalize = extensions => Array.isArray(extensions) ? extensions : Object.keys(extensions);
|
||||
|
||||
const combine = extensions => {
|
||||
for (const ext of extensions) {
|
||||
for (const ext of normalize(extensions)) {
|
||||
if (seen.has(ext)) {
|
||||
duplicates.add(ext);
|
||||
} else {
|
||||
|
|
|
|||
102
node_modules/ava/lib/fork.js
generated
vendored
102
node_modules/ava/lib/fork.js
generated
vendored
|
|
@ -3,6 +3,7 @@ const childProcess = require('child_process');
|
|||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const Emittery = require('emittery');
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
|
||||
if (fs.realpathSync(__filename) !== __filename) {
|
||||
console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
|
||||
|
|
@ -11,10 +12,57 @@ if (fs.realpathSync(__filename) !== __filename) {
|
|||
// In case the test file imports a different AVA install,
|
||||
// the presence of this variable allows it to require this one instead
|
||||
const AVA_PATH = path.resolve(__dirname, '..');
|
||||
const WORKER_PATH = require.resolve('./worker/subprocess');
|
||||
|
||||
const workerPath = require.resolve('./worker/subprocess');
|
||||
class SharedWorkerChannel extends Emittery {
|
||||
constructor({channelId, filename, initialData}, sendToFork) {
|
||||
super();
|
||||
|
||||
this.id = channelId;
|
||||
this.filename = filename;
|
||||
this.initialData = initialData;
|
||||
this.sendToFork = sendToFork;
|
||||
}
|
||||
|
||||
signalReady() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-ready',
|
||||
channelId: this.id
|
||||
});
|
||||
}
|
||||
|
||||
signalError() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-error',
|
||||
channelId: this.id
|
||||
});
|
||||
}
|
||||
|
||||
emitMessage({messageId, replyTo, serializedData}) {
|
||||
this.emit('message', {
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
|
||||
forwardMessageToFork({messageId, replyTo, serializedData}) {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-message',
|
||||
channelId: this.id,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let forkCounter = 0;
|
||||
|
||||
module.exports = (file, options, execArgv = process.execArgv) => {
|
||||
const forkId = `fork/${++forkCounter}`;
|
||||
const sharedWorkerChannels = new Map();
|
||||
|
||||
let finished = false;
|
||||
|
||||
const emitter = new Emittery();
|
||||
|
|
@ -25,12 +73,13 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
};
|
||||
|
||||
options = {
|
||||
file,
|
||||
baseDir: process.cwd(),
|
||||
file,
|
||||
forkId,
|
||||
...options
|
||||
};
|
||||
|
||||
const subprocess = childProcess.fork(workerPath, options.workerArgv, {
|
||||
const subprocess = childProcess.fork(WORKER_PATH, options.workerArgv, {
|
||||
cwd: options.projectDir,
|
||||
silent: true,
|
||||
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
|
||||
|
|
@ -45,12 +94,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
emitStateChange({type: 'worker-stderr', chunk});
|
||||
});
|
||||
|
||||
const bufferedSend = controlFlow(subprocess);
|
||||
|
||||
let forcedExit = false;
|
||||
const send = evt => {
|
||||
if (subprocess.connected && !finished && !forcedExit) {
|
||||
subprocess.send({ava: evt}, () => {
|
||||
// Disregard errors.
|
||||
});
|
||||
if (!finished && !forcedExit) {
|
||||
bufferedSend({ava: evt});
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -65,15 +114,25 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
return;
|
||||
}
|
||||
|
||||
if (message.ava.type === 'ready-for-options') {
|
||||
send({type: 'options', options});
|
||||
return;
|
||||
}
|
||||
switch (message.ava.type) {
|
||||
case 'ready-for-options':
|
||||
send({type: 'options', options});
|
||||
break;
|
||||
case 'shared-worker-connect': {
|
||||
const channel = new SharedWorkerChannel(message.ava, send);
|
||||
sharedWorkerChannels.set(channel.id, channel);
|
||||
emitter.emit('connectSharedWorker', channel);
|
||||
break;
|
||||
}
|
||||
|
||||
if (message.ava.type === 'ping') {
|
||||
send({type: 'pong'});
|
||||
} else {
|
||||
emitStateChange(message.ava);
|
||||
case 'shared-worker-message':
|
||||
sharedWorkerChannels.get(message.ava.channelId).emitMessage(message.ava);
|
||||
break;
|
||||
case 'ping':
|
||||
send({type: 'pong'});
|
||||
break;
|
||||
default:
|
||||
emitStateChange(message.ava);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
@ -98,6 +157,10 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
});
|
||||
|
||||
return {
|
||||
file,
|
||||
forkId,
|
||||
promise,
|
||||
|
||||
exit() {
|
||||
forcedExit = true;
|
||||
subprocess.kill();
|
||||
|
|
@ -107,11 +170,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
send({type: 'peer-failed'});
|
||||
},
|
||||
|
||||
onStateChange(listener) {
|
||||
return emitter.on('stateChange', listener);
|
||||
onConnectSharedWorker(listener) {
|
||||
return emitter.on('connectSharedWorker', listener);
|
||||
},
|
||||
|
||||
file,
|
||||
promise
|
||||
onStateChange(listener) {
|
||||
return emitter.on('stateChange', listener);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
|||
6
node_modules/ava/lib/globs.js
generated
vendored
6
node_modules/ava/lib/globs.js
generated
vendored
|
|
@ -82,11 +82,7 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
|
|||
filePatterns = defaultTestPatterns;
|
||||
}
|
||||
|
||||
if (ignoredByWatcherPatterns) {
|
||||
ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)];
|
||||
} else {
|
||||
ignoredByWatcherPatterns = [...defaultIgnoredByWatcherPatterns];
|
||||
}
|
||||
ignoredByWatcherPatterns = ignoredByWatcherPatterns ? [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)] : [...defaultIgnoredByWatcherPatterns];
|
||||
|
||||
for (const {level, main} of providers) {
|
||||
if (level >= providerManager.levels.pathRewrites) {
|
||||
|
|
|
|||
39
node_modules/ava/lib/ipc-flow-control.js
generated
vendored
Normal file
39
node_modules/ava/lib/ipc-flow-control.js
generated
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
function controlFlow(channel) {
|
||||
let errored = false;
|
||||
let deliverImmediately = true;
|
||||
|
||||
const backlog = [];
|
||||
const deliverNext = error => {
|
||||
if (error !== null) {
|
||||
errored = true;
|
||||
}
|
||||
|
||||
if (errored || !channel.connected) {
|
||||
backlog.length = 0; // Free memory.
|
||||
return; // We can't send.
|
||||
}
|
||||
|
||||
let ok = true;
|
||||
while (ok && backlog.length > 0) { // Stop sending after backpressure.
|
||||
ok = channel.send(backlog.shift(), deliverNext);
|
||||
}
|
||||
|
||||
// Re-enable immediate delivery if there is no backpressure and the backlog
|
||||
// has been cleared.
|
||||
deliverImmediately = ok && backlog.length === 0;
|
||||
};
|
||||
|
||||
return message => {
|
||||
if (errored || !channel.connected) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (deliverImmediately) {
|
||||
deliverImmediately = channel.send(message, deliverNext);
|
||||
} else {
|
||||
backlog.push(message);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
exports.controlFlow = controlFlow;
|
||||
37
node_modules/ava/lib/like-selector.js
generated
vendored
Normal file
37
node_modules/ava/lib/like-selector.js
generated
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
'use strict';
|
||||
function isLikeSelector(selector) {
|
||||
return selector !== null &&
|
||||
typeof selector === 'object' &&
|
||||
Reflect.getPrototypeOf(selector) === Object.prototype &&
|
||||
Reflect.ownKeys(selector).length > 0;
|
||||
}
|
||||
|
||||
exports.isLikeSelector = isLikeSelector;
|
||||
|
||||
const CIRCULAR_SELECTOR = new Error('Encountered a circular selector');
|
||||
exports.CIRCULAR_SELECTOR = CIRCULAR_SELECTOR;
|
||||
|
||||
function selectComparable(lhs, selector, circular = new Set()) {
|
||||
if (circular.has(selector)) {
|
||||
throw CIRCULAR_SELECTOR;
|
||||
}
|
||||
|
||||
circular.add(selector);
|
||||
|
||||
if (lhs === null || typeof lhs !== 'object') {
|
||||
return lhs;
|
||||
}
|
||||
|
||||
const comparable = {};
|
||||
for (const [key, rhs] of Object.entries(selector)) {
|
||||
if (isLikeSelector(rhs)) {
|
||||
comparable[key] = selectComparable(Reflect.get(lhs, key), rhs, circular);
|
||||
} else {
|
||||
comparable[key] = Reflect.get(lhs, key);
|
||||
}
|
||||
}
|
||||
|
||||
return comparable;
|
||||
}
|
||||
|
||||
exports.selectComparable = selectComparable;
|
||||
4
node_modules/ava/lib/line-numbers.js
generated
vendored
4
node_modules/ava/lib/line-numbers.js
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
'use strict';
|
||||
|
||||
const micromatch = require('micromatch');
|
||||
const picomatch = require('picomatch');
|
||||
const flatten = require('lodash/flatten');
|
||||
|
||||
const NUMBER_REGEX = /^\d+$/;
|
||||
|
|
@ -56,7 +56,7 @@ exports.splitPatternAndLineNumbers = splitPatternAndLineNumbers;
|
|||
function getApplicableLineNumbers(normalizedFilePath, filter) {
|
||||
return sortNumbersAscending(distinctArray(flatten(
|
||||
filter
|
||||
.filter(({pattern, lineNumbers}) => lineNumbers && micromatch.isMatch(normalizedFilePath, pattern))
|
||||
.filter(({pattern, lineNumbers}) => lineNumbers && picomatch.isMatch(normalizedFilePath, pattern))
|
||||
.map(({lineNumbers}) => lineNumbers)
|
||||
)));
|
||||
}
|
||||
|
|
|
|||
143
node_modules/ava/lib/load-config.js
generated
vendored
143
node_modules/ava/lib/load-config.js
generated
vendored
|
|
@ -1,27 +1,48 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const url = require('url');
|
||||
const vm = require('vm');
|
||||
const isPlainObject = require('is-plain-object');
|
||||
const {isPlainObject} = require('is-plain-object');
|
||||
const pkgConf = require('pkg-conf');
|
||||
|
||||
const NO_SUCH_FILE = Symbol('no ava.config.js file');
|
||||
const MISSING_DEFAULT_EXPORT = Symbol('missing default export');
|
||||
const EXPERIMENTS = new Set();
|
||||
const EXPERIMENTS = new Set([
|
||||
'configurableModuleFormat',
|
||||
'disableNullExpectations',
|
||||
'disableSnapshotsInHooks',
|
||||
'nextGenConfig',
|
||||
'reverseTeardowns',
|
||||
'sharedWorkers'
|
||||
]);
|
||||
|
||||
// *Very* rudimentary support for loading ava.config.js files containing an `export default` statement.
|
||||
const evaluateJsConfig = configFile => {
|
||||
const contents = fs.readFileSync(configFile, 'utf8');
|
||||
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.replace(/export default/g, '__export__ =')};return __export__;})()`, {
|
||||
const evaluateJsConfig = (contents, configFile) => {
|
||||
const script = new vm.Script(`'use strict';(()=>{let __export__;\n${contents.toString('utf8').replace(/export default/g, '__export__ =')};return __export__;})()`, {
|
||||
filename: configFile,
|
||||
lineOffset: -1
|
||||
});
|
||||
return {
|
||||
default: script.runInThisContext()
|
||||
};
|
||||
return script.runInThisContext();
|
||||
};
|
||||
|
||||
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}) => {
|
||||
const importConfig = async ({configFile, fileForErrorMessage}) => {
|
||||
let module;
|
||||
try {
|
||||
module = await import(url.pathToFileURL(configFile)); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
} catch (error) {
|
||||
throw Object.assign(new Error(`Error loading ${fileForErrorMessage}: ${error.message}`), {parent: error});
|
||||
}
|
||||
|
||||
const {default: config = MISSING_DEFAULT_EXPORT} = module;
|
||||
if (config === MISSING_DEFAULT_EXPORT) {
|
||||
throw new Error(`${fileForErrorMessage} must have a default export`);
|
||||
}
|
||||
|
||||
return config;
|
||||
};
|
||||
|
||||
const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.js')}, useImport = false) => {
|
||||
if (!configFile.endsWith('.js')) {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -30,7 +51,10 @@ const loadJsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.confi
|
|||
|
||||
let config;
|
||||
try {
|
||||
({default: config = MISSING_DEFAULT_EXPORT} = evaluateJsConfig(configFile));
|
||||
const contents = fs.readFileSync(configFile);
|
||||
config = useImport && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig') ?
|
||||
importConfig({configFile, fileForErrorMessage}) :
|
||||
evaluateJsConfig(contents, configFile) || MISSING_DEFAULT_EXPORT;
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
|
|
@ -63,14 +87,17 @@ const loadCjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.conf
|
|||
}
|
||||
};
|
||||
|
||||
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}) => {
|
||||
const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.config.mjs')}, experimentally = false) => {
|
||||
if (!configFile.endsWith('.mjs')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const fileForErrorMessage = path.relative(projectDir, configFile);
|
||||
try {
|
||||
fs.readFileSync(configFile);
|
||||
const contents = fs.readFileSync(configFile);
|
||||
if (experimentally && contents.includes('nonSemVerExperiments') && contents.includes('nextGenConfig')) {
|
||||
return {config: importConfig({configFile, fileForErrorMessage}), fileForErrorMessage};
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
|
|
@ -82,11 +109,7 @@ const loadMjsConfig = ({projectDir, configFile = path.join(projectDir, 'ava.conf
|
|||
throw new Error(`AVA cannot yet load ${fileForErrorMessage} files`);
|
||||
};
|
||||
|
||||
function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) { // eslint-disable-line complexity
|
||||
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
function resolveConfigFile(projectDir, configFile) {
|
||||
if (configFile) {
|
||||
configFile = path.resolve(configFile); // Relative to CWD
|
||||
if (path.basename(configFile) !== path.relative(projectDir, configFile)) {
|
||||
|
|
@ -98,6 +121,15 @@ function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {
|
|||
}
|
||||
}
|
||||
|
||||
return configFile;
|
||||
}
|
||||
|
||||
function loadConfigSync({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
|
||||
let packageConf = pkgConf.sync('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
configFile = resolveConfigFile(projectDir, configFile);
|
||||
const allowConflictWithPackageJson = Boolean(configFile);
|
||||
|
||||
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
|
||||
|
|
@ -157,4 +189,79 @@ function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {
|
|||
return config;
|
||||
}
|
||||
|
||||
module.exports = loadConfig;
|
||||
exports.loadConfigSync = loadConfigSync;
|
||||
|
||||
async function loadConfig({configFile, resolveFrom = process.cwd(), defaults = {}} = {}) {
|
||||
let packageConf = await pkgConf('ava', {cwd: resolveFrom});
|
||||
const filepath = pkgConf.filepath(packageConf);
|
||||
const projectDir = filepath === null ? resolveFrom : path.dirname(filepath);
|
||||
|
||||
configFile = resolveConfigFile(projectDir, configFile);
|
||||
const allowConflictWithPackageJson = Boolean(configFile);
|
||||
|
||||
// TODO: Refactor resolution logic to implement https://github.com/avajs/ava/issues/2285.
|
||||
let [{config: fileConf, fileForErrorMessage} = {config: NO_SUCH_FILE, fileForErrorMessage: undefined}, ...conflicting] = [
|
||||
loadJsConfig({projectDir, configFile}, true),
|
||||
loadCjsConfig({projectDir, configFile}),
|
||||
loadMjsConfig({projectDir, configFile}, true)
|
||||
].filter(result => result !== null);
|
||||
|
||||
if (conflicting.length > 0) {
|
||||
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and ${conflicting.map(({fileForErrorMessage}) => fileForErrorMessage).join(' & ')}`);
|
||||
}
|
||||
|
||||
let sawPromise = false;
|
||||
if (fileConf !== NO_SUCH_FILE) {
|
||||
if (allowConflictWithPackageJson) {
|
||||
packageConf = {};
|
||||
} else if (Object.keys(packageConf).length > 0) {
|
||||
throw new Error(`Conflicting configuration in ${fileForErrorMessage} and package.json`);
|
||||
}
|
||||
|
||||
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
|
||||
sawPromise = true;
|
||||
fileConf = await fileConf;
|
||||
}
|
||||
|
||||
if (!isPlainObject(fileConf) && typeof fileConf !== 'function') {
|
||||
throw new TypeError(`${fileForErrorMessage} must export a plain object or factory function`);
|
||||
}
|
||||
|
||||
if (typeof fileConf === 'function') {
|
||||
fileConf = fileConf({projectDir});
|
||||
if (fileConf && typeof fileConf.then === 'function') { // eslint-disable-line promise/prefer-await-to-then
|
||||
sawPromise = true;
|
||||
fileConf = await fileConf;
|
||||
}
|
||||
|
||||
if (!isPlainObject(fileConf)) {
|
||||
throw new TypeError(`Factory method exported by ${fileForErrorMessage} must return a plain object`);
|
||||
}
|
||||
}
|
||||
|
||||
if ('ava' in fileConf) {
|
||||
throw new Error(`Encountered ’ava’ property in ${fileForErrorMessage}; avoid wrapping the configuration`);
|
||||
}
|
||||
}
|
||||
|
||||
const config = {...defaults, nonSemVerExperiments: {}, ...fileConf, ...packageConf, projectDir};
|
||||
|
||||
const {nonSemVerExperiments: experiments} = config;
|
||||
if (!isPlainObject(experiments)) {
|
||||
throw new Error(`nonSemVerExperiments from ${fileForErrorMessage} must be an object`);
|
||||
}
|
||||
|
||||
for (const key of Object.keys(experiments)) {
|
||||
if (!EXPERIMENTS.has(key)) {
|
||||
throw new Error(`nonSemVerExperiments.${key} from ${fileForErrorMessage} is not a supported experiment`);
|
||||
}
|
||||
}
|
||||
|
||||
if (sawPromise && experiments.nextGenConfig !== true) {
|
||||
throw new Error(`${fileForErrorMessage} exported a promise or an asynchronous factory function. You must enable the ’asyncConfigurationLoading’ experiment for this to work.`);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
exports.loadConfig = loadConfig;
|
||||
|
|
|
|||
75
node_modules/ava/lib/module-types.js
generated
vendored
Normal file
75
node_modules/ava/lib/module-types.js
generated
vendored
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
const requireTrueValue = value => {
|
||||
if (value !== true) {
|
||||
throw new TypeError('When specifying module types, use `true` for ’cjs’, ’mjs’ and ’js’ extensions');
|
||||
}
|
||||
};
|
||||
|
||||
const normalize = (extension, type, defaultModuleType) => {
|
||||
switch (extension) {
|
||||
case 'cjs':
|
||||
requireTrueValue(type);
|
||||
return 'commonjs';
|
||||
case 'mjs':
|
||||
requireTrueValue(type);
|
||||
return 'module';
|
||||
case 'js':
|
||||
requireTrueValue(type);
|
||||
return defaultModuleType;
|
||||
default:
|
||||
if (type !== 'commonjs' && type !== 'module') {
|
||||
throw new TypeError(`Module type for ’${extension}’ must be ’commonjs’ or ’module’`);
|
||||
}
|
||||
|
||||
return type;
|
||||
}
|
||||
};
|
||||
|
||||
const deriveFromObject = (extensionsObject, defaultModuleType) => {
|
||||
const moduleTypes = {};
|
||||
for (const [extension, type] of Object.entries(extensionsObject)) {
|
||||
moduleTypes[extension] = normalize(extension, type, defaultModuleType);
|
||||
}
|
||||
|
||||
return moduleTypes;
|
||||
};
|
||||
|
||||
const deriveFromArray = (extensions, defaultModuleType) => {
|
||||
const moduleTypes = {};
|
||||
for (const extension of extensions) {
|
||||
switch (extension) {
|
||||
case 'cjs':
|
||||
moduleTypes.cjs = 'commonjs';
|
||||
break;
|
||||
case 'mjs':
|
||||
moduleTypes.mjs = 'module';
|
||||
break;
|
||||
case 'js':
|
||||
moduleTypes.js = defaultModuleType;
|
||||
break;
|
||||
default:
|
||||
moduleTypes[extension] = 'commonjs';
|
||||
}
|
||||
}
|
||||
|
||||
return moduleTypes;
|
||||
};
|
||||
|
||||
module.exports = (configuredExtensions, defaultModuleType, experiments) => {
|
||||
if (configuredExtensions === undefined) {
|
||||
return {
|
||||
cjs: 'commonjs',
|
||||
mjs: 'module',
|
||||
js: defaultModuleType
|
||||
};
|
||||
}
|
||||
|
||||
if (Array.isArray(configuredExtensions)) {
|
||||
return deriveFromArray(configuredExtensions, defaultModuleType);
|
||||
}
|
||||
|
||||
if (!experiments.configurableModuleFormat) {
|
||||
throw new Error('You must enable the `configurableModuleFormat` experiment in order to specify module types');
|
||||
}
|
||||
|
||||
return deriveFromObject(configuredExtensions, defaultModuleType);
|
||||
};
|
||||
252
node_modules/ava/lib/plugin-support/shared-worker-loader.js
generated
vendored
Normal file
252
node_modules/ava/lib/plugin-support/shared-worker-loader.js
generated
vendored
Normal file
|
|
@ -0,0 +1,252 @@
|
|||
const {EventEmitter, on} = require('events');
|
||||
const v8 = require('v8');
|
||||
const {workerData, parentPort} = require('worker_threads');
|
||||
const pkg = require('../../package.json');
|
||||
|
||||
// Used to forward messages received over the `parentPort`. Every subscription
|
||||
// adds a listener, so do not enforce any maximums.
|
||||
const events = new EventEmitter().setMaxListeners(0);
|
||||
|
||||
// Map of active test workers, used in receiveMessages() to get a reference to
|
||||
// the TestWorker instance, and relevant release functions.
|
||||
const activeTestWorkers = new Map();
|
||||
|
||||
class TestWorker {
|
||||
constructor(id, file) {
|
||||
this.id = id;
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
teardown(fn) {
|
||||
let done = false;
|
||||
const teardownFn = async () => {
|
||||
if (done) {
|
||||
return;
|
||||
}
|
||||
|
||||
done = true;
|
||||
if (activeTestWorkers.has(this.id)) {
|
||||
activeTestWorkers.get(this.id).teardownFns.delete(teardownFn);
|
||||
}
|
||||
|
||||
await fn();
|
||||
};
|
||||
|
||||
activeTestWorkers.get(this.id).teardownFns.add(teardownFn);
|
||||
|
||||
return teardownFn;
|
||||
}
|
||||
|
||||
publish(data) {
|
||||
return publishMessage(this, data);
|
||||
}
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages(this);
|
||||
}
|
||||
}
|
||||
|
||||
class ReceivedMessage {
|
||||
constructor(testWorker, id, serializedData) {
|
||||
this.testWorker = testWorker;
|
||||
this.id = id;
|
||||
this.data = v8.deserialize(new Uint8Array(serializedData));
|
||||
}
|
||||
|
||||
reply(data) {
|
||||
return publishMessage(this.testWorker, data, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that, no matter how often it's received, we have a stable message
|
||||
// object.
|
||||
const messageCache = new WeakMap();
|
||||
|
||||
async function * receiveMessages(fromTestWorker, replyTo) {
|
||||
for await (const [message] of on(events, 'message')) {
|
||||
if (fromTestWorker !== undefined) {
|
||||
if (message.type === 'deregister-test-worker' && message.id === fromTestWorker.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'message' && message.testWorkerId !== fromTestWorker.id) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (message.type !== 'message') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo === undefined && message.replyTo !== undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo !== undefined && message.replyTo !== replyTo) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const active = activeTestWorkers.get(message.testWorkerId);
|
||||
// It is possible for a message to have been buffering for so long — perhaps
|
||||
// due to the caller waiting before iterating to the next message — that the
|
||||
// test worker has been deregistered. Ignore such messages.
|
||||
//
|
||||
// (This is really hard to write a test for, however!)
|
||||
if (active === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let received = messageCache.get(message);
|
||||
if (received === undefined) {
|
||||
received = new ReceivedMessage(active.instance, message.messageId, message.serializedData);
|
||||
messageCache.set(message, received);
|
||||
}
|
||||
|
||||
yield received;
|
||||
}
|
||||
}
|
||||
|
||||
let messageCounter = 0;
|
||||
const messageIdPrefix = `${workerData.id}/message`;
|
||||
const nextMessageId = () => `${messageIdPrefix}/${++messageCounter}`;
|
||||
|
||||
function publishMessage(testWorker, data, replyTo) {
|
||||
const id = nextMessageId();
|
||||
parentPort.postMessage({
|
||||
type: 'message',
|
||||
messageId: id,
|
||||
testWorkerId: testWorker.id,
|
||||
serializedData: [...v8.serialize(data)],
|
||||
replyTo
|
||||
});
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(testWorker, id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function broadcastMessage(data) {
|
||||
const id = nextMessageId();
|
||||
parentPort.postMessage({
|
||||
type: 'broadcast',
|
||||
messageId: id,
|
||||
serializedData: [...v8.serialize(data)]
|
||||
});
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(undefined, id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function loadFactory() {
|
||||
try {
|
||||
const mod = require(workerData.filename);
|
||||
if (typeof mod === 'function') {
|
||||
return mod;
|
||||
}
|
||||
|
||||
return mod.default;
|
||||
} catch (error) {
|
||||
if (error && (error.code === 'ERR_REQUIRE_ESM' || (error.code === 'MODULE_NOT_FOUND' && workerData.filename.startsWith('file://')))) {
|
||||
const {default: factory} = await import(workerData.filename); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
return factory;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
let signalAvailable = () => {
|
||||
parentPort.postMessage({type: 'available'});
|
||||
signalAvailable = () => {};
|
||||
};
|
||||
|
||||
let fatal;
|
||||
loadFactory(workerData.filename).then(factory => {
|
||||
if (typeof factory !== 'function') {
|
||||
throw new TypeError(`Missing default factory function export for shared worker plugin at ${workerData.filename}`);
|
||||
}
|
||||
|
||||
factory({
|
||||
negotiateProtocol(supported) {
|
||||
if (!supported.includes('experimental')) {
|
||||
fatal = new Error(`This version of AVA (${pkg.version}) is not compatible with shared worker plugin at ${workerData.filename}`);
|
||||
throw fatal;
|
||||
}
|
||||
|
||||
const produceTestWorker = instance => events.emit('testWorker', instance);
|
||||
|
||||
parentPort.on('message', async message => {
|
||||
if (message.type === 'register-test-worker') {
|
||||
const {id, file} = message;
|
||||
const instance = new TestWorker(id, file);
|
||||
|
||||
activeTestWorkers.set(id, {instance, teardownFns: new Set()});
|
||||
|
||||
produceTestWorker(instance);
|
||||
}
|
||||
|
||||
if (message.type === 'deregister-test-worker') {
|
||||
const {id} = message;
|
||||
const {teardownFns} = activeTestWorkers.get(id);
|
||||
activeTestWorkers.delete(id);
|
||||
|
||||
// Run possibly asynchronous release functions serially, in reverse
|
||||
// order. Any error will crash the worker.
|
||||
for await (const fn of [...teardownFns].reverse()) {
|
||||
await fn();
|
||||
}
|
||||
|
||||
parentPort.postMessage({
|
||||
type: 'deregistered-test-worker',
|
||||
id
|
||||
});
|
||||
}
|
||||
|
||||
// Wait for a turn of the event loop, to allow new subscriptions to be
|
||||
// set up in response to the previous message.
|
||||
setImmediate(() => events.emit('message', message));
|
||||
});
|
||||
|
||||
return {
|
||||
initialData: workerData.initialData,
|
||||
protocol: 'experimental',
|
||||
|
||||
ready() {
|
||||
signalAvailable();
|
||||
return this;
|
||||
},
|
||||
|
||||
broadcast(data) {
|
||||
return broadcastMessage(data);
|
||||
},
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages();
|
||||
},
|
||||
|
||||
async * testWorkers() {
|
||||
for await (const [worker] of on(events, 'testWorker')) {
|
||||
yield worker;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}).catch(error => {
|
||||
if (fatal === undefined) {
|
||||
fatal = error;
|
||||
}
|
||||
}).finally(() => {
|
||||
if (fatal !== undefined) {
|
||||
process.nextTick(() => {
|
||||
throw fatal;
|
||||
});
|
||||
}
|
||||
});
|
||||
140
node_modules/ava/lib/plugin-support/shared-workers.js
generated
vendored
Normal file
140
node_modules/ava/lib/plugin-support/shared-workers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
const events = require('events');
|
||||
const serializeError = require('../serialize-error');
|
||||
|
||||
let Worker;
|
||||
try {
|
||||
({Worker} = require('worker_threads'));
|
||||
} catch {}
|
||||
|
||||
const LOADER = require.resolve('./shared-worker-loader');
|
||||
|
||||
let sharedWorkerCounter = 0;
|
||||
const launchedWorkers = new Map();
|
||||
|
||||
const waitForAvailable = async worker => {
|
||||
for await (const [message] of events.on(worker, 'message')) {
|
||||
if (message.type === 'available') {
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function launchWorker({filename, initialData}) {
|
||||
if (launchedWorkers.has(filename)) {
|
||||
return launchedWorkers.get(filename);
|
||||
}
|
||||
|
||||
const id = `shared-worker/${++sharedWorkerCounter}`;
|
||||
const worker = new Worker(LOADER, {
|
||||
// Ensure the worker crashes for unhandled rejections, rather than allowing undefined behavior.
|
||||
execArgv: ['--unhandled-rejections=strict'],
|
||||
workerData: {
|
||||
filename,
|
||||
id,
|
||||
initialData
|
||||
}
|
||||
});
|
||||
worker.setMaxListeners(0);
|
||||
|
||||
const launched = {
|
||||
statePromises: {
|
||||
available: waitForAvailable(worker),
|
||||
error: events.once(worker, 'error').then(([error]) => error) // eslint-disable-line promise/prefer-await-to-then
|
||||
},
|
||||
exited: false,
|
||||
worker
|
||||
};
|
||||
|
||||
launchedWorkers.set(filename, launched);
|
||||
worker.once('exit', () => {
|
||||
launched.exited = true;
|
||||
});
|
||||
|
||||
return launched;
|
||||
}
|
||||
|
||||
async function observeWorkerProcess(fork, runStatus) {
|
||||
let registrationCount = 0;
|
||||
let signalDeregistered;
|
||||
const deregistered = new Promise(resolve => {
|
||||
signalDeregistered = resolve;
|
||||
});
|
||||
|
||||
fork.promise.finally(() => {
|
||||
if (registrationCount === 0) {
|
||||
signalDeregistered();
|
||||
}
|
||||
});
|
||||
|
||||
fork.onConnectSharedWorker(async channel => {
|
||||
const launched = launchWorker(channel);
|
||||
|
||||
const handleChannelMessage = ({messageId, replyTo, serializedData}) => {
|
||||
launched.worker.postMessage({
|
||||
type: 'message',
|
||||
testWorkerId: fork.forkId,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
};
|
||||
|
||||
const handleWorkerMessage = async message => {
|
||||
if (message.type === 'broadcast' || (message.type === 'message' && message.testWorkerId === fork.forkId)) {
|
||||
const {messageId, replyTo, serializedData} = message;
|
||||
channel.forwardMessageToFork({messageId, replyTo, serializedData});
|
||||
}
|
||||
|
||||
if (message.type === 'deregistered-test-worker' && message.id === fork.forkId) {
|
||||
launched.worker.off('message', handleWorkerMessage);
|
||||
|
||||
registrationCount--;
|
||||
if (registrationCount === 0) {
|
||||
signalDeregistered();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
launched.statePromises.error.then(error => { // eslint-disable-line promise/prefer-await-to-then
|
||||
signalDeregistered();
|
||||
launched.worker.off('message', handleWorkerMessage);
|
||||
runStatus.emitStateChange({type: 'shared-worker-error', err: serializeError('Shared worker error', true, error)});
|
||||
channel.signalError();
|
||||
});
|
||||
|
||||
try {
|
||||
await launched.statePromises.available;
|
||||
|
||||
registrationCount++;
|
||||
launched.worker.postMessage({
|
||||
type: 'register-test-worker',
|
||||
id: fork.forkId,
|
||||
file: fork.file
|
||||
});
|
||||
|
||||
fork.promise.finally(() => {
|
||||
launched.worker.postMessage({
|
||||
type: 'deregister-test-worker',
|
||||
id: fork.forkId
|
||||
});
|
||||
|
||||
channel.off('message', handleChannelMessage);
|
||||
});
|
||||
|
||||
launched.worker.on('message', handleWorkerMessage);
|
||||
channel.on('message', handleChannelMessage);
|
||||
channel.signalReady();
|
||||
} catch {
|
||||
return;
|
||||
} finally {
|
||||
// Attaching listeners has the side-effect of referencing the worker.
|
||||
// Explicitly unreference it now so it does not prevent the main process
|
||||
// from exiting.
|
||||
launched.worker.unref();
|
||||
}
|
||||
});
|
||||
|
||||
return deregistered;
|
||||
}
|
||||
|
||||
exports.observeWorkerProcess = observeWorkerProcess;
|
||||
2
node_modules/ava/lib/provider-manager.js
generated
vendored
2
node_modules/ava/lib/provider-manager.js
generated
vendored
|
|
@ -21,7 +21,7 @@ function load(providerModule, projectDir) {
|
|||
let level;
|
||||
const provider = makeProvider({
|
||||
negotiateProtocol(identifiers, {version}) {
|
||||
const [identifier] = identifiers.filter(identifier => Reflect.has(levelsByProtocol, identifier));
|
||||
const identifier = identifiers.find(identifier => Reflect.has(levelsByProtocol, identifier));
|
||||
|
||||
if (identifier === undefined) {
|
||||
fatal = new Error(`This version of AVA (${ava.version}) is not compatible with ${providerModule}@${version}`);
|
||||
|
|
|
|||
920
node_modules/ava/lib/reporters/default.js
generated
vendored
Normal file
920
node_modules/ava/lib/reporters/default.js
generated
vendored
Normal file
|
|
@ -0,0 +1,920 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const cliCursor = require('cli-cursor');
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const ora = require('ora');
|
||||
const plur = require('plur');
|
||||
const prettyMs = require('pretty-ms');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
class LineWriter extends stream.Writable {
|
||||
constructor(dest) {
|
||||
super();
|
||||
|
||||
this.dest = dest;
|
||||
this.columns = dest.columns || 80;
|
||||
this.lastLineIsEmpty = false;
|
||||
}
|
||||
|
||||
_write(chunk, _, callback) {
|
||||
this.dest.write(chunk);
|
||||
callback();
|
||||
}
|
||||
|
||||
writeLine(string) {
|
||||
if (string) {
|
||||
this.write(indentString(string, 2) + os.EOL);
|
||||
this.lastLineIsEmpty = false;
|
||||
} else {
|
||||
this.write(os.EOL);
|
||||
this.lastLineIsEmpty = true;
|
||||
}
|
||||
}
|
||||
|
||||
ensureEmptyLine() {
|
||||
if (!this.lastLineIsEmpty) {
|
||||
this.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class LineWriterWithSpinner extends LineWriter {
|
||||
constructor(dest, spinner) {
|
||||
super(dest);
|
||||
|
||||
this.lastSpinnerText = '';
|
||||
this.spinner = spinner;
|
||||
}
|
||||
|
||||
_write(chunk, _, callback) {
|
||||
this.spinner.clear();
|
||||
this._writeWithSpinner(chunk.toString('utf8'));
|
||||
|
||||
callback();
|
||||
}
|
||||
|
||||
_writev(pieces, callback) {
|
||||
// Discard the current spinner output. Any lines that were meant to be
|
||||
// preserved should be rewritten.
|
||||
this.spinner.clear();
|
||||
|
||||
const last = pieces.pop();
|
||||
for (const piece of pieces) {
|
||||
this.dest.write(piece.chunk);
|
||||
}
|
||||
|
||||
this._writeWithSpinner(last.chunk.toString('utf8'));
|
||||
callback();
|
||||
}
|
||||
|
||||
_writeWithSpinner(string) {
|
||||
if (!this.spinner.isSpinning) {
|
||||
this.dest.write(string);
|
||||
return;
|
||||
}
|
||||
|
||||
this.lastSpinnerText = string;
|
||||
// Ignore whitespace at the end of the chunk. We're continiously rewriting
|
||||
// the last line through the spinner. Also be careful to remove the indent
|
||||
// as the spinner adds its own.
|
||||
this.spinner.text = string.trimEnd().slice(2);
|
||||
this.spinner.render();
|
||||
}
|
||||
}
|
||||
|
||||
function manageCorking(stream) {
|
||||
let corked = false;
|
||||
const cork = () => {
|
||||
corked = true;
|
||||
stream.cork();
|
||||
};
|
||||
|
||||
const uncork = () => {
|
||||
corked = false;
|
||||
stream.uncork();
|
||||
};
|
||||
|
||||
return {
|
||||
decorateFlushingWriter(fn) {
|
||||
return function (...args) {
|
||||
if (corked) {
|
||||
stream.uncork();
|
||||
}
|
||||
|
||||
try {
|
||||
return fn.apply(this, args);
|
||||
} finally {
|
||||
if (corked) {
|
||||
stream.cork();
|
||||
}
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
decorateWriter(fn) {
|
||||
return function (...args) {
|
||||
cork();
|
||||
try {
|
||||
return fn.apply(this, args);
|
||||
} finally {
|
||||
uncork();
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
class Reporter {
|
||||
constructor({
|
||||
verbose,
|
||||
reportStream,
|
||||
stdStream,
|
||||
projectDir,
|
||||
watching,
|
||||
spinner,
|
||||
durationThreshold
|
||||
}) {
|
||||
this.verbose = verbose;
|
||||
this.reportStream = reportStream;
|
||||
this.stdStream = stdStream;
|
||||
this.watching = watching;
|
||||
this.relativeFile = file => path.relative(projectDir, file);
|
||||
|
||||
const {decorateWriter, decorateFlushingWriter} = manageCorking(this.reportStream);
|
||||
this.consumeStateChange = decorateWriter(this.consumeStateChange);
|
||||
this.endRun = decorateWriter(this.endRun);
|
||||
|
||||
if (this.verbose) {
|
||||
this.durationThreshold = durationThreshold || 100;
|
||||
this.spinner = null;
|
||||
this.clearSpinner = () => {};
|
||||
this.lineWriter = new LineWriter(this.reportStream);
|
||||
} else {
|
||||
this.spinner = ora({
|
||||
isEnabled: true,
|
||||
color: spinner ? spinner.color : 'gray',
|
||||
discardStdin: !watching,
|
||||
hideCursor: false,
|
||||
spinner: spinner || (process.platform === 'win32' ? 'line' : 'dots'),
|
||||
stream: reportStream
|
||||
});
|
||||
this.clearSpinner = decorateFlushingWriter(this.spinner.clear.bind(this.spinner));
|
||||
this.lineWriter = new LineWriterWithSpinner(this.reportStream, this.spinner);
|
||||
}
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset() {
|
||||
if (this.removePreviousListener) {
|
||||
this.removePreviousListener();
|
||||
}
|
||||
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
|
||||
this.runningTestFiles = new Map();
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.filesWithoutDeclaredTests = new Set();
|
||||
this.filesWithoutMatchedLineNumbers = new Set();
|
||||
|
||||
this.failures = [];
|
||||
this.internalErrors = [];
|
||||
this.knownFailures = [];
|
||||
this.lineNumberErrors = [];
|
||||
this.sharedWorkerErrors = [];
|
||||
this.uncaughtExceptions = [];
|
||||
this.unhandledRejections = [];
|
||||
this.unsavedSnapshots = [];
|
||||
|
||||
this.previousFailures = 0;
|
||||
|
||||
this.failFastEnabled = false;
|
||||
this.lastLineIsEmpty = false;
|
||||
this.matching = false;
|
||||
|
||||
this.removePreviousListener = null;
|
||||
this.stats = null;
|
||||
}
|
||||
|
||||
startRun(plan) {
|
||||
if (plan.bailWithoutReporting) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reset();
|
||||
|
||||
this.failFastEnabled = plan.failFastEnabled;
|
||||
this.matching = plan.matching;
|
||||
this.previousFailures = plan.previousFailures;
|
||||
this.emptyParallelRun = plan.status.emptyParallelRun;
|
||||
|
||||
if (this.watching || plan.files.length > 1) {
|
||||
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
|
||||
}
|
||||
|
||||
this.removePreviousListener = plan.status.on('stateChange', evt => {
|
||||
this.consumeStateChange(evt);
|
||||
});
|
||||
|
||||
if (this.watching && plan.runVector > 1) {
|
||||
this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
|
||||
}
|
||||
|
||||
if (this.spinner === null) {
|
||||
this.lineWriter.writeLine();
|
||||
} else {
|
||||
cliCursor.hide(this.reportStream);
|
||||
this.lineWriter.writeLine();
|
||||
this.spinner.start();
|
||||
}
|
||||
}
|
||||
|
||||
consumeStateChange(event) { // eslint-disable-line complexity
|
||||
const fileStats = this.stats && event.testFile ? this.stats.byFile.get(event.testFile) : null;
|
||||
|
||||
switch (event.type) { // eslint-disable-line default-case
|
||||
case 'hook-failed': {
|
||||
this.failures.push(event);
|
||||
this.writeTestSummary(event);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'stats': {
|
||||
this.stats = event.stats;
|
||||
break;
|
||||
}
|
||||
|
||||
case 'test-failed': {
|
||||
this.failures.push(event);
|
||||
this.writeTestSummary(event);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'test-passed': {
|
||||
if (event.knownFailing) {
|
||||
this.knownFailures.push(event);
|
||||
}
|
||||
|
||||
this.writeTestSummary(event);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'timeout': {
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(event);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'interrupt': {
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(event);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'internal-error': {
|
||||
this.internalErrors.push(event);
|
||||
|
||||
if (event.testFile) {
|
||||
this.write(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
|
||||
} else {
|
||||
this.write(colors.error(`${figures.cross} Internal error`));
|
||||
}
|
||||
|
||||
if (this.verbose) {
|
||||
this.lineWriter.writeLine(colors.stack(event.err.summary));
|
||||
this.lineWriter.writeLine(colors.errorStack(event.err.stack));
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'line-number-selection-error': {
|
||||
this.lineNumberErrors.push(event);
|
||||
|
||||
this.write(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection`));
|
||||
break;
|
||||
}
|
||||
|
||||
case 'missing-ava-import': {
|
||||
this.filesWithMissingAvaImports.add(event.testFile);
|
||||
|
||||
this.write(colors.error(`${figures.cross} No tests found in ${this.relativeFile(event.testFile)}, make sure to import "ava" at the top of your test file`));
|
||||
break;
|
||||
}
|
||||
|
||||
case 'hook-finished': {
|
||||
if (this.verbose && event.logs.length > 0) {
|
||||
this.lineWriter.writeLine(` ${this.prefixTitle(event.testFile, event.title)}`);
|
||||
this.writeLogs(event);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'selected-test': {
|
||||
if (this.verbose) {
|
||||
if (event.skip) {
|
||||
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(event.testFile, event.title)}`));
|
||||
} else if (event.todo) {
|
||||
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(event.testFile, event.title)}`));
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'shared-worker-error': {
|
||||
this.sharedWorkerErrors.push(event);
|
||||
|
||||
if (this.verbose) {
|
||||
this.lineWriter.ensureEmptyLine();
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(event);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'snapshot-error':
|
||||
this.unsavedSnapshots.push(event);
|
||||
break;
|
||||
|
||||
case 'uncaught-exception': {
|
||||
this.uncaughtExceptions.push(event);
|
||||
|
||||
if (this.verbose) {
|
||||
this.lineWriter.ensureEmptyLine();
|
||||
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(event);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'unhandled-rejection': {
|
||||
this.unhandledRejections.push(event);
|
||||
|
||||
if (this.verbose) {
|
||||
this.lineWriter.ensureEmptyLine();
|
||||
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(event);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'worker-failed': {
|
||||
if (fileStats.declaredTests === 0) {
|
||||
this.filesWithoutDeclaredTests.add(event.testFile);
|
||||
}
|
||||
|
||||
if (this.verbose && !this.filesWithMissingAvaImports.has(event.testFile)) {
|
||||
if (event.nonZeroExitCode) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited with a non-zero exit code: ${event.nonZeroExitCode}`));
|
||||
} else {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(event.testFile)} exited due to ${event.signal}`));
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'worker-finished': {
|
||||
if (!event.forcedExit && !this.filesWithMissingAvaImports.has(event.testFile)) {
|
||||
if (fileStats.declaredTests === 0) {
|
||||
this.filesWithoutDeclaredTests.add(event.testFile);
|
||||
|
||||
this.write(colors.error(`${figures.cross} No tests found in ${this.relativeFile(event.testFile)}`));
|
||||
} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
|
||||
this.filesWithoutMatchedLineNumbers.add(event.testFile);
|
||||
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(event.testFile)} did not match any tests`));
|
||||
} else if (this.verbose && !this.failFastEnabled && fileStats.remainingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(event.testFile)}`));
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'worker-stderr': {
|
||||
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
|
||||
this.clearSpinner();
|
||||
|
||||
this.stdStream.write(event.chunk);
|
||||
// If the chunk does not end with a linebreak, *forcibly* write one to
|
||||
// ensure it remains visible in the TTY.
|
||||
// Tests cannot assume their standard output is not interrupted. Indeed
|
||||
// we multiplex stdout and stderr into a single stream. However as
|
||||
// long as stdStream is different from reportStream users can read
|
||||
// their original output by redirecting the streams.
|
||||
if (event.chunk[event.chunk.length - 1] !== 0x0A) {
|
||||
this.reportStream.write(os.EOL);
|
||||
}
|
||||
|
||||
if (this.spinner !== null) {
|
||||
this.lineWriter.write(this.lineWriter.lastSpinnerText);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 'worker-stdout': {
|
||||
// Forcibly clear the spinner, writing the chunk corrupts the TTY.
|
||||
this.clearSpinner();
|
||||
|
||||
this.stdStream.write(event.chunk);
|
||||
// If the chunk does not end with a linebreak, *forcibly* write one to
|
||||
// ensure it remains visible in the TTY.
|
||||
// Tests cannot assume their standard output is not interrupted. Indeed
|
||||
// we multiplex stdout and stderr into a single stream. However as
|
||||
// long as stdStream is different from reportStream users can read
|
||||
// their original output by redirecting the streams.
|
||||
if (event.chunk[event.chunk.length - 1] !== 0x0A) {
|
||||
this.reportStream.write(os.EOL);
|
||||
}
|
||||
|
||||
if (this.spinner !== null) {
|
||||
this.lineWriter.write(this.lineWriter.lastSpinnerText);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writePendingTests(evt) {
|
||||
for (const [file, testsInFile] of evt.pendingTests) {
|
||||
if (testsInFile.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
|
||||
for (const title of testsInFile) {
|
||||
this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine('');
|
||||
}
|
||||
}
|
||||
|
||||
write(string) {
|
||||
if (this.verbose) {
|
||||
this.lineWriter.writeLine(string);
|
||||
} else {
|
||||
this.writeWithCounts(string);
|
||||
}
|
||||
}
|
||||
|
||||
writeWithCounts(string) {
|
||||
if (!this.stats) {
|
||||
return this.lineWriter.writeLine(string);
|
||||
}
|
||||
|
||||
string = string || '';
|
||||
if (string !== '') {
|
||||
string += os.EOL;
|
||||
}
|
||||
|
||||
let firstLinePostfix = this.watching ? ' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') : '';
|
||||
|
||||
if (this.stats.passedTests > 0) {
|
||||
string += os.EOL + colors.pass(`${this.stats.passedTests} passed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`);
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
string += os.EOL + colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix;
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
string += os.EOL + colors.skip(`${this.stats.skippedTests} skipped`);
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
string += os.EOL + colors.todo(`${this.stats.todoTests} todo`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(string);
|
||||
}
|
||||
|
||||
writeErr(event) {
|
||||
if (event.err.name === 'TSError' && event.err.object && event.err.object.diagnosticText) {
|
||||
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(event.err.object.diagnosticText)));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.err.source) {
|
||||
this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(event.err.source.file)}:${event.err.source.line}`));
|
||||
const excerpt = codeExcerpt(event.err.source, {maxWidth: this.reportStream.columns - 2});
|
||||
if (excerpt) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(excerpt);
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
|
||||
if (event.err.avaAssertionError) {
|
||||
const result = formatSerializedError(event.err);
|
||||
if (result.printMessage) {
|
||||
this.lineWriter.writeLine(event.err.message);
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
if (result.formatted) {
|
||||
this.lineWriter.writeLine(result.formatted);
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const message = improperUsageMessages.forError(event.err);
|
||||
if (message) {
|
||||
this.lineWriter.writeLine(message);
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
} else if (event.err.nonErrorObject) {
|
||||
this.lineWriter.writeLine(trimOffNewlines(event.err.formatted));
|
||||
this.lineWriter.writeLine();
|
||||
} else {
|
||||
this.lineWriter.writeLine(event.err.summary);
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const formatted = this.formatErrorStack(event.err);
|
||||
if (formatted.length > 0) {
|
||||
this.lineWriter.writeLine(formatted.join('\n'));
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
|
||||
formatErrorStack(error) {
|
||||
if (!error.stack) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (error.shouldBeautifyStack) {
|
||||
return beautifyStack(error.stack).map(line => {
|
||||
if (nodeInternals.some(internal => internal.test(line))) {
|
||||
return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
|
||||
}
|
||||
|
||||
return colors.errorStack(`${figures.pointerSmall} ${line}`);
|
||||
});
|
||||
}
|
||||
|
||||
return [error.stack];
|
||||
}
|
||||
|
||||
writeLogs(event, surroundLines) {
|
||||
if (event.logs && event.logs.length > 0) {
|
||||
if (surroundLines) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
for (const log of event.logs) {
|
||||
const logLines = indentString(colors.log(log), 4);
|
||||
const logLinesWithLeadingFigure = logLines.replace(/^ {4}/, ` ${colors.information(figures.info)} `);
|
||||
this.lineWriter.writeLine(logLinesWithLeadingFigure);
|
||||
}
|
||||
|
||||
if (surroundLines) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
writeTestSummary(event) {
|
||||
if (event.type === 'hook-failed' || event.type === 'test-failed') {
|
||||
if (this.verbose) {
|
||||
this.write(`${colors.error(figures.cross)} ${this.prefixTitle(event.testFile, event.title)} ${colors.error(event.err.message)}`);
|
||||
} else {
|
||||
this.write(this.prefixTitle(event.testFile, event.title));
|
||||
}
|
||||
} else if (event.knownFailing) {
|
||||
if (this.verbose) {
|
||||
this.write(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(event.testFile, event.title))}`);
|
||||
} else {
|
||||
this.write(colors.error(this.prefixTitle(event.testFile, event.title)));
|
||||
}
|
||||
} else if (this.verbose) {
|
||||
const duration = event.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(event.duration) + ')') : '';
|
||||
this.write(`${colors.pass(figures.tick)} ${this.prefixTitle(event.testFile, event.title)}${duration}`);
|
||||
} else {
|
||||
this.write(this.prefixTitle(event.testFile, event.title));
|
||||
}
|
||||
|
||||
if (this.verbose) {
|
||||
this.writeLogs(event);
|
||||
}
|
||||
}
|
||||
|
||||
writeFailure(event) {
|
||||
this.lineWriter.writeLine(colors.title(this.prefixTitle(event.testFile, event.title)));
|
||||
if (!this.writeLogs(event, true)) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
this.writeErr(event);
|
||||
}
|
||||
|
||||
// Writes the end-of-run report: per-file warnings, failure details, crash
// reports, snapshot/fail-fast/parallel-run notices and the final stat lines.
// Output ordering and blank-line placement are deliberate; `firstLinePostfix`
// appends the watch-mode timestamp to whichever summary line comes first.
endRun() {// eslint-disable-line complexity
	let firstLinePostfix = this.watching ? ` ${chalk.gray.dim(`[${new Date().toLocaleTimeString('en-US', {hour12: false})}]`)}` : '';
	let wroteSomething = false;

	if (!this.verbose) {
		// Non-verbose mode runs a spinner during the run; stop it and restore
		// the cursor before printing the report.
		this.spinner.stop();
		cliCursor.show(this.reportStream);
	} else if (this.emptyParallelRun) {
		this.lineWriter.writeLine('No files tested in this parallel run');
		this.lineWriter.writeLine();
		return;
	}

	// No stats at all means no test files were found.
	if (!this.stats) {
		this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test` + firstLinePostfix));
		this.lineWriter.writeLine();
		return;
	}

	if (this.matching && this.stats.selectedTests === 0) {
		this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests` + firstLinePostfix));
		this.lineWriter.writeLine();
		return;
	}

	if (this.verbose) {
		this.lineWriter.writeLine(colors.log(figures.line));
		this.lineWriter.writeLine();
	} else {
		// Non-verbose mode did not print these warnings while the run was in
		// progress, so emit them now.
		if (this.filesWithMissingAvaImports.size > 0) {
			for (const testFile of this.filesWithMissingAvaImports) {
				this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
				firstLinePostfix = '';
				wroteSomething = true;
			}
		}

		if (this.filesWithoutDeclaredTests.size > 0) {
			for (const testFile of this.filesWithoutDeclaredTests) {
				// Files already reported as missing the "ava" import are skipped.
				if (!this.filesWithMissingAvaImports.has(testFile)) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
					firstLinePostfix = '';
					wroteSomething = true;
				}
			}
		}

		if (this.lineNumberErrors.length > 0) {
			for (const event of this.lineNumberErrors) {
				this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(event.testFile)} for line number selection` + firstLinePostfix));
				firstLinePostfix = '';
				wroteSomething = true;
			}
		}

		if (this.filesWithoutMatchedLineNumbers.size > 0) {
			for (const testFile of this.filesWithoutMatchedLineNumbers) {
				if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
					firstLinePostfix = '';
					wroteSomething = true;
				}
			}
		}

		// Separate the warnings from the rest of the report.
		if (wroteSomething) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(colors.log(figures.line));
			this.lineWriter.writeLine();
			wroteSomething = false;
		}
	}

	if (this.failures.length > 0) {
		const writeTrailingLines = this.internalErrors.length > 0 || this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

		const lastFailure = this.failures[this.failures.length - 1];
		for (const event of this.failures) {
			this.writeFailure(event);
			// Two blank lines between failures; also after the last one when
			// crash sections follow in non-verbose mode.
			if (event !== lastFailure) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine();
			} else if (!this.verbose && writeTrailingLines) {
				this.lineWriter.writeLine();
				this.lineWriter.writeLine();
			}

			wroteSomething = true;
		}

		if (this.verbose) {
			this.lineWriter.writeLine(colors.log(figures.line));
			this.lineWriter.writeLine();
		}
	}

	// Crash sections (internal errors, shared-worker errors, uncaught
	// exceptions, unhandled rejections) are only printed here in non-verbose
	// mode; verbose mode reported them as they happened.
	if (!this.verbose) {
		if (this.internalErrors.length > 0) {
			const writeTrailingLines = this.sharedWorkerErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

			const last = this.internalErrors[this.internalErrors.length - 1];
			for (const event of this.internalErrors) {
				if (event.testFile) {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(event.testFile)}`));
				} else {
					this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
				}

				this.lineWriter.writeLine(colors.stack(event.err.summary));
				this.lineWriter.writeLine(colors.errorStack(event.err.stack));
				if (event !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				wroteSomething = true;
			}
		}

		if (this.sharedWorkerErrors.length > 0) {
			const writeTrailingLines = this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;

			const last = this.sharedWorkerErrors[this.sharedWorkerErrors.length - 1];
			for (const evt of this.sharedWorkerErrors) {
				this.lineWriter.writeLine(colors.error(`${figures.cross} Error in shared worker`));
				this.lineWriter.writeLine();
				// NOTE(review): unlike the other sections this passes `evt.err`
				// rather than the event itself — writeErr presumably expects an
				// object with an `err` property here; confirm against writeErr's
				// signature elsewhere in this file.
				this.writeErr(evt.err);
				if (evt !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				wroteSomething = true;
			}
		}

		if (this.uncaughtExceptions.length > 0) {
			const writeTrailingLines = this.unhandledRejections.length > 0;

			const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
			for (const event of this.uncaughtExceptions) {
				this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(event.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(event);
				if (event !== last || writeTrailingLines) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				wroteSomething = true;
			}
		}

		if (this.unhandledRejections.length > 0) {
			const last = this.unhandledRejections[this.unhandledRejections.length - 1];
			for (const event of this.unhandledRejections) {
				this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(event.testFile)}`));
				this.lineWriter.writeLine();
				this.writeErr(event);
				if (event !== last) {
					this.lineWriter.writeLine();
					this.lineWriter.writeLine();
				}

				wroteSomething = true;
			}
		}

		if (wroteSomething) {
			this.lineWriter.writeLine(colors.log(figures.line));
			this.lineWriter.writeLine();
		}
	}

	if (this.unsavedSnapshots.length > 0) {
		this.lineWriter.writeLine(colors.title('Could not update snapshots for the following test files:'));
		this.lineWriter.writeLine();
		for (const event of this.unsavedSnapshots) {
			this.lineWriter.writeLine(`${figures.warning} ${this.relativeFile(event.testFile)}`);
		}

		this.lineWriter.writeLine();
	}

	// Fail-fast disclaimer: report how many tests and/or test files were
	// skipped because the run was cut short.
	if (this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers)) {
		let remaining = '';
		if (this.stats.remainingTests > 0) {
			remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
			if (this.stats.files > this.stats.finishedWorkers) {
				remaining += ', as well as ';
			}
		}

		if (this.stats.files > this.stats.finishedWorkers) {
			const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
			remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
			if (this.stats.remainingTests === 0) {
				remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
			}
		}

		this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
		if (this.verbose) {
			this.lineWriter.writeLine();
		}
	}

	if (this.verbose && this.stats.parallelRuns) {
		const {
			currentFileCount,
			currentIndex,
			totalRuns
		} = this.stats.parallelRuns;
		this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
		this.lineWriter.writeLine();
	}

	// Final stat lines. The first line written gets the watch-mode timestamp
	// (if it wasn't consumed earlier).
	if (this.stats.failedHooks > 0) {
		this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
		firstLinePostfix = '';
	}

	if (this.stats.failedTests > 0) {
		this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
		firstLinePostfix = '';
	}

	// The "passed" line is only written for a fully green run.
	if (
		this.stats.failedHooks === 0 &&
		this.stats.failedTests === 0 &&
		this.stats.passedTests > 0
	) {
		this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix
		);
		firstLinePostfix = '';
	}

	if (this.stats.passedKnownFailingTests > 0) {
		this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
	}

	if (this.stats.skippedTests > 0) {
		this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
	}

	if (this.stats.todoTests > 0) {
		this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
	}

	if (this.stats.unhandledRejections > 0) {
		this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
	}

	if (this.stats.uncaughtExceptions > 0) {
		this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
	}

	if (this.previousFailures > 0) {
		this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
	}

	if (this.watching) {
		this.lineWriter.writeLine();
	}
}
|
||||
}
|
||||
module.exports = Reporter;
|
||||
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
619
node_modules/ava/lib/reporters/mini.js
generated
vendored
|
|
@ -1,619 +0,0 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const cliCursor = require('cli-cursor');
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const ora = require('ora');
|
||||
const plur = require('plur');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
// Writable stream that routes reporter output through an `ora` spinner:
// each write clears the spinner, forwards the text, and re-renders the last
// chunk as the spinner's label so it keeps updating in place.
class LineWriter extends stream.Writable {
	constructor(dest, spinner) {
		super();

		this.dest = dest;
		// Fall back to 80 columns when the destination is not a TTY.
		this.columns = dest.columns || 80;
		this.spinner = spinner;
		this.lastSpinnerText = '';
	}

	_write(chunk, encoding, callback) {
		// Discard the current spinner output. Any lines that were meant to be
		// preserved should be rewritten.
		this.spinner.clear();

		this._writeWithSpinner(chunk.toString('utf8'));
		callback();
	}

	_writev(pieces, callback) {
		// Discard the current spinner output. Any lines that were meant to be
		// preserved should be rewritten.
		this.spinner.clear();

		// Write all but the last piece directly; only the final piece becomes
		// the spinner's text.
		const last = pieces.pop();
		for (const piece of pieces) {
			this.dest.write(piece.chunk);
		}

		this._writeWithSpinner(last.chunk.toString('utf8'));
		callback();
	}

	_writeWithSpinner(string) {
		// `spinner.id` is only set while the spinner is running; otherwise
		// write straight to the destination.
		if (!this.spinner.id) {
			this.dest.write(string);
			return;
		}

		this.lastSpinnerText = string;
		// Ignore whitespace at the end of the chunk. We're continuously rewriting
		// the last line through the spinner. Also be careful to remove the indent
		// as the spinner adds its own.
		this.spinner.text = string.trimEnd().slice(2);
		this.spinner.render();
	}

	// Writes `string` indented by two spaces plus a newline; with no argument
	// writes a blank line.
	writeLine(string) {
		if (string) {
			this.write(indentString(string, 2) + os.EOL);
		} else {
			this.write(os.EOL);
		}
	}
}
|
||||
|
||||
class MiniReporter {
|
||||
// Sets up the mini reporter: output streams, the spinner, the spinner-aware
// line writer, and corked wrappers around the state-change handlers so each
// report is flushed as one atomic write.
constructor(options) {
	this.reportStream = options.reportStream;
	this.stdStream = options.stdStream;
	this.watching = options.watching;

	this.spinner = ora({
		isEnabled: true,
		color: options.spinner ? options.spinner.color : 'gray',
		// In watch mode stdin must stay usable between runs.
		discardStdin: !options.watching,
		hideCursor: false,
		// 'dots' renders poorly on Windows terminals, so fall back to 'line'.
		spinner: options.spinner || (process.platform === 'win32' ? 'line' : 'dots'),
		stream: options.reportStream
	});
	this.lineWriter = new LineWriter(this.reportStream, this.spinner);

	// Cork both the report stream and the line writer while handlers run, so
	// partial output never interleaves with the spinner.
	this.consumeStateChange = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.consumeStateChange));
	this.endRun = whileCorked(this.reportStream, whileCorked(this.lineWriter, this.endRun));
	this.relativeFile = file => path.relative(options.projectDir, file);

	this.reset();
}
|
||||
|
||||
reset() {
|
||||
if (this.removePreviousListener) {
|
||||
this.removePreviousListener();
|
||||
}
|
||||
|
||||
this.failFastEnabled = false;
|
||||
this.failures = [];
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.filesWithoutDeclaredTests = new Set();
|
||||
this.filesWithoutMatchedLineNumbers = new Set();
|
||||
this.internalErrors = [];
|
||||
this.knownFailures = [];
|
||||
this.lineNumberErrors = [];
|
||||
this.matching = false;
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
this.previousFailures = 0;
|
||||
this.removePreviousListener = null;
|
||||
this.stats = null;
|
||||
this.uncaughtExceptions = [];
|
||||
this.unhandledRejections = [];
|
||||
}
|
||||
|
||||
// Prepares the reporter for a new run described by `plan`: resets state,
// copies plan flags, subscribes to status events, and starts the spinner.
startRun(plan) {
	if (plan.bailWithoutReporting) {
		return;
	}

	this.reset();

	this.failFastEnabled = plan.failFastEnabled;
	this.matching = plan.matching;
	this.previousFailures = plan.previousFailures;

	// Prefix titles with the test file when output could otherwise be
	// ambiguous (multiple files, or watch mode where files change).
	if (this.watching || plan.files.length > 1) {
		this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
	}

	this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));

	// Visually separate consecutive watch-mode runs with a horizontal rule.
	if (this.watching && plan.runVector > 1) {
		this.reportStream.write(chalk.gray.dim('\u2500'.repeat(this.lineWriter.columns)) + os.EOL);
	}

	cliCursor.hide(this.reportStream);
	this.lineWriter.writeLine();

	this.spinner.start();
}
|
||||
|
||||
// Handles a single status event from the run, updating accumulated state and
// writing incremental output. Crash events are collected here and reported
// in full by endRun.
consumeStateChange(evt) { // eslint-disable-line complexity
	// Per-file stats are only available once the 'stats' event has arrived.
	const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;

	switch (evt.type) {
		case 'declared-test':
			// Ignore
			break;
		case 'hook-failed':
			this.failures.push(evt);
			this.writeTestSummary(evt);
			break;
		case 'internal-error':
			this.internalErrors.push(evt);
			if (evt.testFile) {
				this.writeWithCounts(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
			} else {
				this.writeWithCounts(colors.error(`${figures.cross} Internal error`));
			}

			break;
		case 'line-number-selection-error':
			this.lineNumberErrors.push(evt);
			this.writeWithCounts(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
			break;
		case 'missing-ava-import':
			this.filesWithMissingAvaImports.add(evt.testFile);
			this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
			break;
		case 'selected-test':
			// Ignore
			break;
		case 'stats':
			this.stats = evt.stats;
			break;
		case 'test-failed':
			this.failures.push(evt);
			this.writeTestSummary(evt);
			break;
		case 'test-passed':
			// A known-failing test that passed is itself a problem; remember it.
			if (evt.knownFailing) {
				this.knownFailures.push(evt);
			}

			this.writeTestSummary(evt);
			break;
		case 'timeout':
			this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
			this.lineWriter.writeLine('');
			this.writePendingTests(evt);
			break;
		case 'interrupt':
			this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
			this.lineWriter.writeLine('');
			this.writePendingTests(evt);
			break;
		case 'uncaught-exception':
			this.uncaughtExceptions.push(evt);
			break;
		case 'unhandled-rejection':
			this.unhandledRejections.push(evt);
			break;
		case 'worker-failed':
			if (fileStats.declaredTests === 0) {
				this.filesWithoutDeclaredTests.add(evt.testFile);
			}

			break;
		case 'worker-finished':
			if (fileStats.declaredTests === 0) {
				this.filesWithoutDeclaredTests.add(evt.testFile);
				this.writeWithCounts(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
			} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
				this.filesWithoutMatchedLineNumbers.add(evt.testFile);
				this.writeWithCounts(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
			}

			break;
		case 'worker-stderr':
		case 'worker-stdout':
			// Forcibly clear the spinner, writing the chunk corrupts the TTY.
			this.spinner.clear();

			this.stdStream.write(evt.chunk);
			// If the chunk does not end with a linebreak, *forcibly* write one to
			// ensure it remains visible in the TTY.
			// Tests cannot assume their standard output is not interrupted. Indeed
			// we multiplex stdout and stderr into a single stream. However as
			// long as stdStream is different from reportStream users can read
			// their original output by redirecting the streams.
			if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
				// Use write() rather than writeLine() so the (presumably corked)
				// line writer will actually write the empty line before re-rendering
				// the last spinner text below.
				this.lineWriter.write(os.EOL);
			}

			this.lineWriter.write(this.lineWriter.lastSpinnerText);
			break;
		default:
			break;
	}
}
|
||||
|
||||
// Writes `string` followed by the running tally of results so the spinner
// line always shows up-to-date counts. Before stats exist the string is
// written on its own. The watch-mode timestamp is attached to the first
// count line emitted.
writeWithCounts(string) {
	if (!this.stats) {
		return this.lineWriter.writeLine(string);
	}

	string = string || '';
	if (string !== '') {
		string += os.EOL;
	}

	let firstLinePostfix = this.watching ?
		' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
		'';

	if (this.stats.passedTests > 0) {
		string += os.EOL + colors.pass(`${this.stats.passedTests} passed`) + firstLinePostfix;
		firstLinePostfix = '';
	}

	if (this.stats.passedKnownFailingTests > 0) {
		string += os.EOL + colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`);
	}

	if (this.stats.failedHooks > 0) {
		string += os.EOL + colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix;
		firstLinePostfix = '';
	}

	if (this.stats.failedTests > 0) {
		string += os.EOL + colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix;
		firstLinePostfix = '';
	}

	if (this.stats.skippedTests > 0) {
		string += os.EOL + colors.skip(`${this.stats.skippedTests} skipped`);
	}

	if (this.stats.todoTests > 0) {
		string += os.EOL + colors.todo(`${this.stats.todoTests} todo`);
	}

	this.lineWriter.writeLine(string);
}
|
||||
|
||||
// Writes the detailed error report for an event: source location and code
// excerpt when available, then the assertion/message details, then the
// (optionally beautified) stack trace.
writeErr(evt) {
	// TypeScript compile errors carry their own formatted diagnostic; print
	// it verbatim and skip everything else.
	if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
		this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
		return;
	}

	if (evt.err.source) {
		this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
		// Leave two columns for the line writer's indentation.
		const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.lineWriter.columns - 2});
		if (excerpt) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(excerpt);
		}
	}

	if (evt.err.avaAssertionError) {
		const result = formatSerializedError(evt.err);
		if (result.printMessage) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(evt.err.message);
		}

		if (result.formatted) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(result.formatted);
		}

		// Hint at common assertion misuse (e.g. wrong argument order).
		const message = improperUsageMessages.forError(evt.err);
		if (message) {
			this.lineWriter.writeLine();
			this.lineWriter.writeLine(message);
		}
	} else if (evt.err.nonErrorObject) {
		// A thrown non-Error value was already serialized into `formatted`.
		this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
	} else {
		this.lineWriter.writeLine();
		this.lineWriter.writeLine(evt.err.summary);
	}

	const formatted = this.formatErrorStack(evt.err);
	if (formatted.length > 0) {
		this.lineWriter.writeLine();
		this.lineWriter.writeLine(formatted.join('\n'));
	}
}
|
||||
|
||||
// Returns the error's stack as an array of display lines. When beautifying
// is requested each frame gets a pointer figure, with Node-internal frames
// dimmed via a different color; otherwise the raw stack is returned as-is.
formatErrorStack(error) {
	const {stack, shouldBeautifyStack} = error;

	if (!stack) {
		return [];
	}

	if (!shouldBeautifyStack) {
		return [stack];
	}

	return beautifyStack(stack).map(line => {
		const isNodeInternal = nodeInternals.some(internal => internal.test(line));
		const decorate = isNodeInternal ? colors.errorStackInternal : colors.errorStack;
		return decorate(`${figures.pointerSmall} ${line}`);
	});
}
|
||||
|
||||
writeLogs(evt) {
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(colors.log(log), 4);
|
||||
const logLinesWithLeadingFigure = logLines.replace(
|
||||
/^ {4}/,
|
||||
` ${colors.information(figures.info)} `
|
||||
);
|
||||
this.lineWriter.writeLine(logLinesWithLeadingFigure);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTestSummary(evt) {
|
||||
if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
|
||||
this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
|
||||
} else if (evt.knownFailing) {
|
||||
this.writeWithCounts(`${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
} else {
|
||||
this.writeWithCounts(`${this.prefixTitle(evt.testFile, evt.title)}`);
|
||||
}
|
||||
}
|
||||
|
||||
writeFailure(evt) {
|
||||
this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
this.writeLogs(evt);
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
}
|
||||
|
||||
writePendingTests(evt) {
|
||||
for (const [file, testsInFile] of evt.pendingTests) {
|
||||
if (testsInFile.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
|
||||
for (const title of testsInFile) {
|
||||
this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine('');
|
||||
}
|
||||
}
|
||||
|
||||
endRun() { // eslint-disable-line complexity
|
||||
this.spinner.stop();
|
||||
cliCursor.show(this.reportStream);
|
||||
|
||||
if (!this.stats) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.matching && this.stats.selectedTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
let firstLinePostfix = this.watching ?
|
||||
' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
|
||||
'';
|
||||
|
||||
if (this.filesWithMissingAvaImports.size > 0) {
|
||||
for (const testFile of this.filesWithMissingAvaImports) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}, make sure to import "ava" at the top of your test file`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithoutDeclaredTests.size > 0) {
|
||||
for (const testFile of this.filesWithoutDeclaredTests) {
|
||||
if (!this.filesWithMissingAvaImports.has(testFile)) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(testFile)}`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.lineNumberErrors.length > 0) {
|
||||
for (const evt of this.lineNumberErrors) {
|
||||
this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithoutMatchedLineNumbers.size > 0) {
|
||||
for (const testFile of this.filesWithoutMatchedLineNumbers) {
|
||||
if (!this.filesWithMissingAvaImports.has(testFile) && !this.filesWithoutDeclaredTests.has(testFile)) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(testFile)} did not match any tests`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.filesWithMissingAvaImports.size > 0 || this.filesWithoutDeclaredTests.size > 0 || this.filesWithoutMatchedLineNumbers.size > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
|
||||
}
|
||||
|
||||
if (this.stats.unhandledRejections > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
|
||||
}
|
||||
|
||||
if (this.stats.uncaughtExceptions > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
|
||||
}
|
||||
|
||||
if (this.previousFailures > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
for (const evt of this.knownFailures) {
|
||||
this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
|
||||
}
|
||||
}
|
||||
|
||||
const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);
|
||||
|
||||
if (this.failures.length > 0) {
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.internalErrors.length > 0 || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
const last = this.failures[this.failures.length - 1];
|
||||
for (const evt of this.failures) {
|
||||
this.writeFailure(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.internalErrors.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.uncaughtExceptions.length > 0 || this.unhandledRejections.length > 0;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.internalErrors[this.internalErrors.length - 1];
|
||||
for (const evt of this.internalErrors) {
|
||||
if (evt.testFile) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
|
||||
} else {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.stack(evt.err.summary));
|
||||
this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.uncaughtExceptions.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer || this.unhandledRejections.length > 0;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.uncaughtExceptions[this.uncaughtExceptions.length - 1];
|
||||
for (const evt of this.uncaughtExceptions) {
|
||||
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.unhandledRejections.length > 0) {
|
||||
const writeLeadingLine = this.failures.length === 0 && this.internalErrors.length === 0 && this.uncaughtExceptions.length === 0;
|
||||
const writeTrailingLines = shouldWriteFailFastDisclaimer;
|
||||
|
||||
if (writeLeadingLine) {
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
const last = this.unhandledRejections[this.unhandledRejections.length - 1];
|
||||
for (const evt of this.unhandledRejections) {
|
||||
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
if (evt !== last || writeTrailingLines) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldWriteFailFastDisclaimer) {
|
||||
let remaining = '';
|
||||
if (this.stats.remainingTests > 0) {
|
||||
remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
remaining += ', as well as ';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
|
||||
remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
|
||||
if (this.stats.remainingTests === 0) {
|
||||
remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
|
||||
}
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
module.exports = MiniReporter;
|
||||
19
node_modules/ava/lib/reporters/tap.js
generated
vendored
19
node_modules/ava/lib/reporters/tap.js
generated
vendored
|
|
@ -30,7 +30,7 @@ function dumpError(error) {
|
|||
}
|
||||
|
||||
if (error.values.length > 0) {
|
||||
object.values = error.values.reduce((acc, value) => {
|
||||
object.values = error.values.reduce((acc, value) => { // eslint-disable-line unicorn/no-reduce
|
||||
acc[value.label] = stripAnsi(value.formatted);
|
||||
return acc;
|
||||
}, {});
|
||||
|
|
@ -125,12 +125,22 @@ class TapReporter {
|
|||
this.reportStream.write(`# ${stripAnsi(title)}${os.EOL}`);
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(log, 4).replace(/^ {4}/, ' # ');
|
||||
const logLines = indentString(log, 4).replace(/^ {4}/gm, '# ');
|
||||
this.reportStream.write(`${logLines}${os.EOL}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTimeout(evt) {
|
||||
const err = new Error(`Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
|
||||
|
||||
for (const [testFile, tests] of evt.pendingTests) {
|
||||
for (const title of tests) {
|
||||
this.writeTest({testFile, title, err}, {passed: false, todo: false, skip: false});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
consumeStateChange(evt) { // eslint-disable-line complexity
|
||||
const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;
|
||||
|
||||
|
|
@ -158,6 +168,9 @@ class TapReporter {
|
|||
this.writeTest(evt, {passed: false, todo: true, skip: false});
|
||||
}
|
||||
|
||||
break;
|
||||
case 'snapshot-error':
|
||||
this.writeComment(evt, {title: 'Could not update snapshots'});
|
||||
break;
|
||||
case 'stats':
|
||||
this.stats = evt.stats;
|
||||
|
|
@ -169,7 +182,7 @@ class TapReporter {
|
|||
this.writeTest(evt, {passed: true, todo: false, skip: false});
|
||||
break;
|
||||
case 'timeout':
|
||||
this.writeCrash(evt, `Exited because no new tests completed within the last ${evt.period}ms of inactivity`);
|
||||
this.writeTimeout(evt);
|
||||
break;
|
||||
case 'uncaught-exception':
|
||||
this.writeCrash(evt);
|
||||
|
|
|
|||
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
463
node_modules/ava/lib/reporters/verbose.js
generated
vendored
|
|
@ -1,463 +0,0 @@
|
|||
'use strict';
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const stream = require('stream');
|
||||
|
||||
const figures = require('figures');
|
||||
const indentString = require('indent-string');
|
||||
const plur = require('plur');
|
||||
const prettyMs = require('pretty-ms');
|
||||
const trimOffNewlines = require('trim-off-newlines');
|
||||
const beautifyStack = require('./beautify-stack');
|
||||
|
||||
const chalk = require('../chalk').get();
|
||||
const codeExcerpt = require('../code-excerpt');
|
||||
const colors = require('./colors');
|
||||
const formatSerializedError = require('./format-serialized-error');
|
||||
const improperUsageMessages = require('./improper-usage-messages');
|
||||
const prefixTitle = require('./prefix-title');
|
||||
const whileCorked = require('./while-corked');
|
||||
|
||||
const nodeInternals = require('stack-utils').nodeInternals();
|
||||
|
||||
class LineWriter extends stream.Writable {
|
||||
constructor(dest) {
|
||||
super();
|
||||
|
||||
this.dest = dest;
|
||||
this.columns = dest.columns || 80;
|
||||
this.lastLineIsEmpty = false;
|
||||
}
|
||||
|
||||
_write(chunk, encoding, callback) {
|
||||
this.dest.write(chunk);
|
||||
callback();
|
||||
}
|
||||
|
||||
writeLine(string) {
|
||||
if (string) {
|
||||
this.write(indentString(string, 2) + os.EOL);
|
||||
this.lastLineIsEmpty = false;
|
||||
} else {
|
||||
this.write(os.EOL);
|
||||
this.lastLineIsEmpty = true;
|
||||
}
|
||||
}
|
||||
|
||||
ensureEmptyLine() {
|
||||
if (!this.lastLineIsEmpty) {
|
||||
this.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class VerboseReporter {
|
||||
constructor(options) {
|
||||
this.durationThreshold = options.durationThreshold || 100;
|
||||
this.reportStream = options.reportStream;
|
||||
this.stdStream = options.stdStream;
|
||||
this.watching = options.watching;
|
||||
|
||||
this.lineWriter = new LineWriter(this.reportStream);
|
||||
this.consumeStateChange = whileCorked(this.reportStream, this.consumeStateChange);
|
||||
this.endRun = whileCorked(this.reportStream, this.endRun);
|
||||
this.relativeFile = file => path.relative(options.projectDir, file);
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset() {
|
||||
if (this.removePreviousListener) {
|
||||
this.removePreviousListener();
|
||||
}
|
||||
|
||||
this.failFastEnabled = false;
|
||||
this.failures = [];
|
||||
this.filesWithMissingAvaImports = new Set();
|
||||
this.knownFailures = [];
|
||||
this.runningTestFiles = new Map();
|
||||
this.lastLineIsEmpty = false;
|
||||
this.matching = false;
|
||||
this.prefixTitle = (testFile, title) => title;
|
||||
this.previousFailures = 0;
|
||||
this.removePreviousListener = null;
|
||||
this.stats = null;
|
||||
}
|
||||
|
||||
startRun(plan) {
|
||||
if (plan.bailWithoutReporting) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reset();
|
||||
|
||||
this.failFastEnabled = plan.failFastEnabled;
|
||||
this.matching = plan.matching;
|
||||
this.previousFailures = plan.previousFailures;
|
||||
this.emptyParallelRun = plan.status.emptyParallelRun;
|
||||
|
||||
if (this.watching || plan.files.length > 1) {
|
||||
this.prefixTitle = (testFile, title) => prefixTitle(plan.filePathPrefix, testFile, title);
|
||||
}
|
||||
|
||||
this.removePreviousListener = plan.status.on('stateChange', evt => this.consumeStateChange(evt));
|
||||
|
||||
if (this.watching && plan.runVector > 1) {
|
||||
this.lineWriter.write(chalk.gray.dim('\u2500'.repeat(this.reportStream.columns || 80)) + os.EOL);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
consumeStateChange(evt) { // eslint-disable-line complexity
|
||||
const fileStats = this.stats && evt.testFile ? this.stats.byFile.get(evt.testFile) : null;
|
||||
|
||||
switch (evt.type) {
|
||||
case 'hook-failed':
|
||||
this.failures.push(evt);
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'internal-error':
|
||||
if (evt.testFile) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error when running ${this.relativeFile(evt.testFile)}`));
|
||||
} else {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Internal error`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.stack(evt.err.summary));
|
||||
this.lineWriter.writeLine(colors.errorStack(evt.err.stack));
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
break;
|
||||
case 'line-number-selection-error':
|
||||
this.lineWriter.writeLine(colors.information(`${figures.warning} Could not parse ${this.relativeFile(evt.testFile)} for line number selection`));
|
||||
break;
|
||||
case 'missing-ava-import':
|
||||
this.filesWithMissingAvaImports.add(evt.testFile);
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}, make sure to import "ava" at the top of your test file`));
|
||||
break;
|
||||
case 'hook-finished':
|
||||
if (evt.logs.length > 0) {
|
||||
this.lineWriter.writeLine(` ${this.prefixTitle(evt.testFile, evt.title)}`);
|
||||
this.writeLogs(evt);
|
||||
}
|
||||
|
||||
break;
|
||||
case 'selected-test':
|
||||
if (evt.skip) {
|
||||
this.lineWriter.writeLine(colors.skip(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
|
||||
} else if (evt.todo) {
|
||||
this.lineWriter.writeLine(colors.todo(`- ${this.prefixTitle(evt.testFile, evt.title)}`));
|
||||
}
|
||||
|
||||
break;
|
||||
case 'stats':
|
||||
this.stats = evt.stats;
|
||||
break;
|
||||
case 'test-failed':
|
||||
this.failures.push(evt);
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'test-passed':
|
||||
if (evt.knownFailing) {
|
||||
this.knownFailures.push(evt);
|
||||
}
|
||||
|
||||
this.writeTestSummary(evt);
|
||||
break;
|
||||
case 'timeout':
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Timed out while running tests`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(evt);
|
||||
break;
|
||||
case 'interrupt':
|
||||
this.lineWriter.writeLine(colors.error(`\n${figures.cross} Exiting due to SIGINT`));
|
||||
this.lineWriter.writeLine('');
|
||||
this.writePendingTests(evt);
|
||||
break;
|
||||
case 'uncaught-exception':
|
||||
this.lineWriter.ensureEmptyLine();
|
||||
this.lineWriter.writeLine(colors.title(`Uncaught exception in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
this.lineWriter.writeLine();
|
||||
break;
|
||||
case 'unhandled-rejection':
|
||||
this.lineWriter.ensureEmptyLine();
|
||||
this.lineWriter.writeLine(colors.title(`Unhandled rejection in ${this.relativeFile(evt.testFile)}`));
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
this.lineWriter.writeLine();
|
||||
break;
|
||||
case 'worker-failed':
|
||||
if (!this.filesWithMissingAvaImports.has(evt.testFile)) {
|
||||
if (evt.nonZeroExitCode) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited with a non-zero exit code: ${evt.nonZeroExitCode}`));
|
||||
} else {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${this.relativeFile(evt.testFile)} exited due to ${evt.signal}`));
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
case 'worker-finished':
|
||||
if (!evt.forcedExit && !this.filesWithMissingAvaImports.has(evt.testFile)) {
|
||||
if (fileStats.declaredTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} No tests found in ${this.relativeFile(evt.testFile)}`));
|
||||
} else if (fileStats.selectingLines && fileStats.selectedTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Line numbers for ${this.relativeFile(evt.testFile)} did not match any tests`));
|
||||
} else if (!this.failFastEnabled && fileStats.remainingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} ${fileStats.remainingTests} ${plur('test', fileStats.remainingTests)} remaining in ${this.relativeFile(evt.testFile)}`));
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
case 'worker-stderr':
|
||||
case 'worker-stdout':
|
||||
this.stdStream.write(evt.chunk);
|
||||
// If the chunk does not end with a linebreak, *forcibly* write one to
|
||||
// ensure it remains visible in the TTY.
|
||||
// Tests cannot assume their standard output is not interrupted. Indeed
|
||||
// we multiplex stdout and stderr into a single stream. However as
|
||||
// long as stdStream is different from reportStream users can read
|
||||
// their original output by redirecting the streams.
|
||||
if (evt.chunk[evt.chunk.length - 1] !== 0x0A) {
|
||||
this.reportStream.write(os.EOL);
|
||||
}
|
||||
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
writeErr(evt) {
|
||||
if (evt.err.name === 'TSError' && evt.err.object && evt.err.object.diagnosticText) {
|
||||
this.lineWriter.writeLine(colors.errorStack(trimOffNewlines(evt.err.object.diagnosticText)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (evt.err.source) {
|
||||
this.lineWriter.writeLine(colors.errorSource(`${this.relativeFile(evt.err.source.file)}:${evt.err.source.line}`));
|
||||
const excerpt = codeExcerpt(evt.err.source, {maxWidth: this.reportStream.columns - 2});
|
||||
if (excerpt) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(excerpt);
|
||||
}
|
||||
}
|
||||
|
||||
if (evt.err.avaAssertionError) {
|
||||
const result = formatSerializedError(evt.err);
|
||||
if (result.printMessage) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.message);
|
||||
}
|
||||
|
||||
if (result.formatted) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(result.formatted);
|
||||
}
|
||||
|
||||
const message = improperUsageMessages.forError(evt.err);
|
||||
if (message) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(message);
|
||||
}
|
||||
} else if (evt.err.nonErrorObject) {
|
||||
this.lineWriter.writeLine(trimOffNewlines(evt.err.formatted));
|
||||
} else {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(evt.err.summary);
|
||||
}
|
||||
|
||||
const formatted = this.formatErrorStack(evt.err);
|
||||
if (formatted.length > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine(formatted.join('\n'));
|
||||
}
|
||||
}
|
||||
|
||||
formatErrorStack(error) {
|
||||
if (!error.stack) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (error.shouldBeautifyStack) {
|
||||
return beautifyStack(error.stack).map(line => {
|
||||
if (nodeInternals.some(internal => internal.test(line))) {
|
||||
return colors.errorStackInternal(`${figures.pointerSmall} ${line}`);
|
||||
}
|
||||
|
||||
return colors.errorStack(`${figures.pointerSmall} ${line}`);
|
||||
});
|
||||
}
|
||||
|
||||
return [error.stack];
|
||||
}
|
||||
|
||||
writePendingTests(evt) {
|
||||
for (const [file, testsInFile] of evt.pendingTests) {
|
||||
if (testsInFile.size === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(`${testsInFile.size} tests were pending in ${this.relativeFile(file)}\n`);
|
||||
for (const title of testsInFile) {
|
||||
this.lineWriter.writeLine(`${figures.circleDotted} ${this.prefixTitle(file, title)}`);
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine('');
|
||||
}
|
||||
}
|
||||
|
||||
writeLogs(evt) {
|
||||
if (evt.logs) {
|
||||
for (const log of evt.logs) {
|
||||
const logLines = indentString(colors.log(log), 4);
|
||||
const logLinesWithLeadingFigure = logLines.replace(
|
||||
/^ {4}/,
|
||||
` ${colors.information(figures.info)} `
|
||||
);
|
||||
this.lineWriter.writeLine(logLinesWithLeadingFigure);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeTestSummary(evt) {
|
||||
if (evt.type === 'hook-failed' || evt.type === 'test-failed') {
|
||||
this.lineWriter.writeLine(`${colors.error(figures.cross)} ${this.prefixTitle(evt.testFile, evt.title)} ${colors.error(evt.err.message)}`);
|
||||
} else if (evt.knownFailing) {
|
||||
this.lineWriter.writeLine(`${colors.error(figures.tick)} ${colors.error(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
} else {
|
||||
const duration = evt.duration > this.durationThreshold ? colors.duration(' (' + prettyMs(evt.duration) + ')') : '';
|
||||
|
||||
this.lineWriter.writeLine(`${colors.pass(figures.tick)} ${this.prefixTitle(evt.testFile, evt.title)}${duration}`);
|
||||
}
|
||||
|
||||
this.writeLogs(evt);
|
||||
}
|
||||
|
||||
writeFailure(evt) {
|
||||
this.lineWriter.writeLine(`${colors.title(this.prefixTitle(evt.testFile, evt.title))}`);
|
||||
this.writeLogs(evt);
|
||||
this.lineWriter.writeLine();
|
||||
this.writeErr(evt);
|
||||
}
|
||||
|
||||
endRun() { // eslint-disable-line complexity
|
||||
if (this.emptyParallelRun) {
|
||||
this.lineWriter.writeLine('No files tested in this parallel run');
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.stats) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any files to test`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.matching && this.stats.selectedTests === 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${figures.cross} Couldn’t find any matching tests`));
|
||||
this.lineWriter.writeLine();
|
||||
return;
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
if (this.stats.parallelRuns) {
|
||||
const {currentFileCount, currentIndex, totalRuns} = this.stats.parallelRuns;
|
||||
this.lineWriter.writeLine(colors.information(`Ran ${currentFileCount} test ${plur('file', currentFileCount)} out of ${this.stats.files} for job ${currentIndex + 1} of ${totalRuns}`));
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
|
||||
let firstLinePostfix = this.watching ?
|
||||
' ' + chalk.gray.dim('[' + new Date().toLocaleTimeString('en-US', {hour12: false}) + ']') :
|
||||
'';
|
||||
|
||||
if (this.stats.failedHooks > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedHooks} ${plur('hook', this.stats.failedHooks)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.failedTests} ${plur('test', this.stats.failedTests)} failed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.failedHooks === 0 && this.stats.failedTests === 0 && this.stats.passedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.pass(`${this.stats.passedTests} ${plur('test', this.stats.passedTests)} passed`) + firstLinePostfix);
|
||||
firstLinePostfix = '';
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.passedKnownFailingTests} ${plur('known failure', this.stats.passedKnownFailingTests)}`));
|
||||
}
|
||||
|
||||
if (this.stats.skippedTests > 0) {
|
||||
this.lineWriter.writeLine(colors.skip(`${this.stats.skippedTests} ${plur('test', this.stats.skippedTests)} skipped`));
|
||||
}
|
||||
|
||||
if (this.stats.todoTests > 0) {
|
||||
this.lineWriter.writeLine(colors.todo(`${this.stats.todoTests} ${plur('test', this.stats.todoTests)} todo`));
|
||||
}
|
||||
|
||||
if (this.stats.unhandledRejections > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.unhandledRejections} unhandled ${plur('rejection', this.stats.unhandledRejections)}`));
|
||||
}
|
||||
|
||||
if (this.stats.uncaughtExceptions > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.stats.uncaughtExceptions} uncaught ${plur('exception', this.stats.uncaughtExceptions)}`));
|
||||
}
|
||||
|
||||
if (this.previousFailures > 0) {
|
||||
this.lineWriter.writeLine(colors.error(`${this.previousFailures} previous ${plur('failure', this.previousFailures)} in test files that were not rerun`));
|
||||
}
|
||||
|
||||
if (this.stats.passedKnownFailingTests > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
for (const evt of this.knownFailures) {
|
||||
this.lineWriter.writeLine(colors.error(this.prefixTitle(evt.testFile, evt.title)));
|
||||
}
|
||||
}
|
||||
|
||||
const shouldWriteFailFastDisclaimer = this.failFastEnabled && (this.stats.remainingTests > 0 || this.stats.files > this.stats.finishedWorkers);
|
||||
|
||||
if (this.failures.length > 0) {
|
||||
this.lineWriter.writeLine();
|
||||
|
||||
const lastFailure = this.failures[this.failures.length - 1];
|
||||
for (const evt of this.failures) {
|
||||
this.writeFailure(evt);
|
||||
if (evt !== lastFailure || shouldWriteFailFastDisclaimer) {
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldWriteFailFastDisclaimer) {
|
||||
let remaining = '';
|
||||
if (this.stats.remainingTests > 0) {
|
||||
remaining += `At least ${this.stats.remainingTests} ${plur('test was', 'tests were', this.stats.remainingTests)} skipped`;
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
remaining += ', as well as ';
|
||||
}
|
||||
}
|
||||
|
||||
if (this.stats.files > this.stats.finishedWorkers) {
|
||||
const skippedFileCount = this.stats.files - this.stats.finishedWorkers;
|
||||
remaining += `${skippedFileCount} ${plur('test file', 'test files', skippedFileCount)}`;
|
||||
if (this.stats.remainingTests === 0) {
|
||||
remaining += ` ${plur('was', 'were', skippedFileCount)} skipped`;
|
||||
}
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine(colors.information(`\`--fail-fast\` is on. ${remaining}.`));
|
||||
}
|
||||
|
||||
this.lineWriter.writeLine();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = VerboseReporter;
|
||||
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
13
node_modules/ava/lib/reporters/while-corked.js
generated
vendored
|
|
@ -1,13 +0,0 @@
|
|||
'use strict';
|
||||
function whileCorked(stream, fn) {
|
||||
return function (...args) {
|
||||
stream.cork();
|
||||
try {
|
||||
fn.apply(this, args);
|
||||
} finally {
|
||||
stream.uncork();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = whileCorked;
|
||||
5
node_modules/ava/lib/run-status.js
generated
vendored
5
node_modules/ava/lib/run-status.js
generated
vendored
|
|
@ -27,6 +27,7 @@ class RunStatus extends Emittery {
|
|||
passedKnownFailingTests: 0,
|
||||
passedTests: 0,
|
||||
selectedTests: 0,
|
||||
sharedWorkerErrors: 0,
|
||||
skippedTests: 0,
|
||||
timeouts: 0,
|
||||
todoTests: 0,
|
||||
|
|
@ -93,6 +94,9 @@ class RunStatus extends Emittery {
|
|||
this.addPendingTest(event);
|
||||
}
|
||||
|
||||
break;
|
||||
case 'shared-worker-error':
|
||||
stats.sharedWorkerErrors++;
|
||||
break;
|
||||
case 'test-failed':
|
||||
stats.failedTests++;
|
||||
|
|
@ -164,6 +168,7 @@ class RunStatus extends Emittery {
|
|||
this.stats.failedHooks > 0 ||
|
||||
this.stats.failedTests > 0 ||
|
||||
this.stats.failedWorkers > 0 ||
|
||||
this.stats.sharedWorkerErrors > 0 ||
|
||||
this.stats.timeouts > 0 ||
|
||||
this.stats.uncaughtExceptions > 0 ||
|
||||
this.stats.unhandledRejections > 0
|
||||
|
|
|
|||
79
node_modules/ava/lib/runner.js
generated
vendored
79
node_modules/ava/lib/runner.js
generated
vendored
|
|
@ -23,13 +23,17 @@ class Runner extends Emittery {
|
|||
this.recordNewSnapshots = options.recordNewSnapshots === true;
|
||||
this.runOnlyExclusive = options.runOnlyExclusive === true;
|
||||
this.serial = options.serial === true;
|
||||
this.skippingTests = false;
|
||||
this.snapshotDir = options.snapshotDir;
|
||||
this.updateSnapshots = options.updateSnapshots;
|
||||
|
||||
this.activeRunnables = new Set();
|
||||
this.boundCompareTestSnapshot = this.compareTestSnapshot.bind(this);
|
||||
this.skippedSnapshots = false;
|
||||
this.boundSkipSnapshot = this.skipSnapshot.bind(this);
|
||||
this.interrupted = false;
|
||||
this.snapshots = null;
|
||||
this.nextTaskIndex = 0;
|
||||
this.tasks = {
|
||||
after: [],
|
||||
afterAlways: [],
|
||||
|
|
@ -41,6 +45,7 @@ class Runner extends Emittery {
|
|||
serial: [],
|
||||
todo: []
|
||||
};
|
||||
this.waitForReady = [];
|
||||
|
||||
const uniqueTestTitles = new Set();
|
||||
this.registerUniqueTitle = title => {
|
||||
|
|
@ -74,6 +79,8 @@ class Runner extends Emittery {
|
|||
});
|
||||
}
|
||||
|
||||
metadata.taskIndex = this.nextTaskIndex++;
|
||||
|
||||
const {args, buildTitle, implementations, rawTitle} = parseTestArgs(testArgs);
|
||||
|
||||
if (this.checkSelectedByLineNumbers) {
|
||||
|
|
@ -147,6 +154,10 @@ class Runner extends Emittery {
|
|||
task.metadata.exclusive = matcher([title], this.match).length === 1;
|
||||
}
|
||||
|
||||
if (task.metadata.skipped) {
|
||||
this.skippingTests = true;
|
||||
}
|
||||
|
||||
if (task.metadata.exclusive) {
|
||||
this.runOnlyExclusive = true;
|
||||
}
|
||||
|
|
@ -182,7 +193,7 @@ class Runner extends Emittery {
|
|||
fixedLocation: this.snapshotDir,
|
||||
projectDir: this.projectDir,
|
||||
recordNewSnapshots: this.recordNewSnapshots,
|
||||
updating: this.updateSnapshots
|
||||
updating: this.updateSnapshots && !this.runOnlyExclusive && !this.skippingTests
|
||||
});
|
||||
this.emit('dependency', this.snapshots.snapPath);
|
||||
}
|
||||
|
|
@ -190,18 +201,35 @@ class Runner extends Emittery {
|
|||
return this.snapshots.compare(options);
|
||||
}
|
||||
|
||||
skipSnapshot() {
|
||||
this.skippedSnapshots = true;
|
||||
}
|
||||
|
||||
saveSnapshotState() {
|
||||
if (
|
||||
this.updateSnapshots &&
|
||||
(
|
||||
this.runOnlyExclusive ||
|
||||
this.skippingTests ||
|
||||
this.skippedSnapshots
|
||||
)
|
||||
) {
|
||||
return {cannotSave: true};
|
||||
}
|
||||
|
||||
if (this.snapshots) {
|
||||
return this.snapshots.save();
|
||||
return {touchedFiles: this.snapshots.save()};
|
||||
}
|
||||
|
||||
if (this.updateSnapshots) {
|
||||
// TODO: There may be unused snapshot files if no test caused the
|
||||
// snapshots to be loaded. Prune them. But not if tests (including hooks!)
|
||||
// were skipped. Perhaps emit a warning if this occurs?
|
||||
return {touchedFiles: snapshotManager.cleanSnapshots({
|
||||
file: this.file,
|
||||
fixedLocation: this.snapshotDir,
|
||||
projectDir: this.projectDir
|
||||
})};
|
||||
}
|
||||
|
||||
return null;
|
||||
return {};
|
||||
}
|
||||
|
||||
onRun(runnable) {
|
||||
|
|
@ -241,7 +269,7 @@ class Runner extends Emittery {
|
|||
};
|
||||
|
||||
let waitForSerial = Promise.resolve();
|
||||
await runnables.reduce((previous, runnable) => {
|
||||
await runnables.reduce((previous, runnable) => { // eslint-disable-line unicorn/no-reduce
|
||||
if (runnable.metadata.serial || this.serial) {
|
||||
waitForSerial = previous.then(() => {
|
||||
// Serial runnables run as long as there was no previous failure, unless
|
||||
|
|
@ -275,7 +303,7 @@ class Runner extends Emittery {
|
|||
return result;
|
||||
}
|
||||
|
||||
async runHooks(tasks, contextRef, titleSuffix, testPassed) {
|
||||
async runHooks(tasks, contextRef, {titleSuffix, testPassed, associatedTaskIndex} = {}) {
|
||||
const hooks = tasks.map(task => new Runnable({
|
||||
contextRef,
|
||||
experiments: this.experiments,
|
||||
|
|
@ -284,8 +312,9 @@ class Runner extends Emittery {
|
|||
task.implementation :
|
||||
t => task.implementation.apply(null, [t].concat(task.args)),
|
||||
compareTestSnapshot: this.boundCompareTestSnapshot,
|
||||
skipSnapshot: this.boundSkipSnapshot,
|
||||
updateSnapshots: this.updateSnapshots,
|
||||
metadata: task.metadata,
|
||||
metadata: {...task.metadata, associatedTaskIndex},
|
||||
powerAssert: this.powerAssert,
|
||||
title: `${task.title}${titleSuffix || ''}`,
|
||||
isHook: true,
|
||||
|
|
@ -316,7 +345,14 @@ class Runner extends Emittery {
|
|||
|
||||
async runTest(task, contextRef) {
|
||||
const hookSuffix = ` for ${task.title}`;
|
||||
let hooksOk = await this.runHooks(this.tasks.beforeEach, contextRef, hookSuffix);
|
||||
let hooksOk = await this.runHooks(
|
||||
this.tasks.beforeEach,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
}
|
||||
);
|
||||
|
||||
let testOk = false;
|
||||
if (hooksOk) {
|
||||
|
|
@ -329,6 +365,7 @@ class Runner extends Emittery {
|
|||
task.implementation :
|
||||
t => task.implementation.apply(null, [t].concat(task.args)),
|
||||
compareTestSnapshot: this.boundCompareTestSnapshot,
|
||||
skipSnapshot: this.boundSkipSnapshot,
|
||||
updateSnapshots: this.updateSnapshots,
|
||||
metadata: task.metadata,
|
||||
powerAssert: this.powerAssert,
|
||||
|
|
@ -348,7 +385,14 @@ class Runner extends Emittery {
|
|||
logs: result.logs
|
||||
});
|
||||
|
||||
hooksOk = await this.runHooks(this.tasks.afterEach, contextRef, hookSuffix, testOk);
|
||||
hooksOk = await this.runHooks(
|
||||
this.tasks.afterEach,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
testPassed: testOk,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
});
|
||||
} else {
|
||||
this.emit('stateChange', {
|
||||
type: 'test-failed',
|
||||
|
|
@ -362,7 +406,14 @@ class Runner extends Emittery {
|
|||
}
|
||||
}
|
||||
|
||||
const alwaysOk = await this.runHooks(this.tasks.afterEachAlways, contextRef, hookSuffix, testOk);
|
||||
const alwaysOk = await this.runHooks(
|
||||
this.tasks.afterEachAlways,
|
||||
contextRef,
|
||||
{
|
||||
titleSuffix: hookSuffix,
|
||||
testPassed: testOk,
|
||||
associatedTaskIndex: task.metadata.taskIndex
|
||||
});
|
||||
return alwaysOk && hooksOk && testOk;
|
||||
}
|
||||
|
||||
|
|
@ -435,6 +486,8 @@ class Runner extends Emittery {
|
|||
});
|
||||
}
|
||||
|
||||
await Promise.all(this.waitForReady);
|
||||
|
||||
if (concurrentTests.length === 0 && serialTests.length === 0) {
|
||||
this.emit('finish');
|
||||
// Don't run any hooks if there are no tests to run.
|
||||
|
|
@ -451,7 +504,7 @@ class Runner extends Emittery {
|
|||
return false;
|
||||
}
|
||||
|
||||
return serialTests.reduce(async (previous, task) => {
|
||||
return serialTests.reduce(async (previous, task) => { // eslint-disable-line unicorn/no-reduce
|
||||
const previousOk = await previous;
|
||||
// Don't start tests after an interrupt.
|
||||
if (this.interrupted) {
|
||||
|
|
|
|||
76
node_modules/ava/lib/snapshot-manager.js
generated
vendored
76
node_modules/ava/lib/snapshot-manager.js
generated
vendored
|
|
@ -104,13 +104,32 @@ function combineEntries(entries) {
|
|||
const buffers = [];
|
||||
let byteLength = 0;
|
||||
|
||||
const sortedKeys = [...entries.keys()].sort();
|
||||
const sortedKeys = [...entries.keys()].sort((keyA, keyB) => {
|
||||
const [a, b] = [entries.get(keyA), entries.get(keyB)];
|
||||
const taskDifference = a.taskIndex - b.taskIndex;
|
||||
|
||||
if (taskDifference !== 0) {
|
||||
return taskDifference;
|
||||
}
|
||||
|
||||
const [assocA, assocB] = [a.associatedTaskIndex, b.associatedTaskIndex];
|
||||
if (assocA !== undefined && assocB !== undefined) {
|
||||
const assocDifference = assocA - assocB;
|
||||
|
||||
if (assocDifference !== 0) {
|
||||
return assocDifference;
|
||||
}
|
||||
}
|
||||
|
||||
return a.snapIndex - b.snapIndex;
|
||||
});
|
||||
|
||||
for (const key of sortedKeys) {
|
||||
const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
|
||||
buffers.push(keyBuffer);
|
||||
byteLength += keyBuffer.byteLength;
|
||||
|
||||
const formattedEntries = entries.get(key);
|
||||
const formattedEntries = entries.get(key).buffers;
|
||||
const last = formattedEntries[formattedEntries.length - 1];
|
||||
for (const entry of formattedEntries) {
|
||||
buffers.push(entry);
|
||||
|
|
@ -176,10 +195,11 @@ function encodeSnapshots(buffersByHash) {
|
|||
byteOffset += 2;
|
||||
|
||||
const entries = [];
|
||||
for (const pair of buffersByHash) {
|
||||
const hash = pair[0];
|
||||
const snapshotBuffers = pair[1];
|
||||
|
||||
// Maps can't have duplicate keys, so all items in [...buffersByHash.keys()]
|
||||
// are unique, so sortedHashes should be deterministic.
|
||||
const sortedHashes = [...buffersByHash.keys()].sort();
|
||||
const sortedBuffersByHash = [...sortedHashes.map(hash => [hash, buffersByHash.get(hash)])];
|
||||
for (const [hash, snapshotBuffers] of sortedBuffersByHash) {
|
||||
buffers.push(Buffer.from(hash, 'hex'));
|
||||
byteOffset += MD5_HASH_LENGTH;
|
||||
|
||||
|
|
@ -332,6 +352,7 @@ class Manager {
|
|||
const descriptor = concordance.describe(options.expected, concordanceOptions);
|
||||
const snapshot = concordance.serialize(descriptor);
|
||||
const entry = formatEntry(options.label, descriptor);
|
||||
const {taskIndex, snapIndex, associatedTaskIndex} = options;
|
||||
|
||||
return () => { // Must be called in order!
|
||||
this.hasChanges = true;
|
||||
|
|
@ -353,9 +374,9 @@ class Manager {
|
|||
snapshots.push(snapshot);
|
||||
|
||||
if (this.reportEntries.has(options.belongsTo)) {
|
||||
this.reportEntries.get(options.belongsTo).push(entry);
|
||||
this.reportEntries.get(options.belongsTo).buffers.push(entry);
|
||||
} else {
|
||||
this.reportEntries.set(options.belongsTo, [entry]);
|
||||
this.reportEntries.set(options.belongsTo, {buffers: [entry], taskIndex, snapIndex, associatedTaskIndex});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
@ -428,12 +449,49 @@ const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
|
|||
|
||||
exports.determineSnapshotDir = determineSnapshotDir;
|
||||
|
||||
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
|
||||
function determineSnapshotPaths({file, fixedLocation, projectDir}) {
|
||||
const dir = determineSnapshotDir({file, fixedLocation, projectDir});
|
||||
const relFile = path.relative(projectDir, resolveSourceFile(file));
|
||||
const name = path.basename(relFile);
|
||||
const reportFile = `${name}.md`;
|
||||
const snapFile = `${name}.snap`;
|
||||
|
||||
return {
|
||||
dir,
|
||||
relFile,
|
||||
snapFile,
|
||||
reportFile
|
||||
};
|
||||
}
|
||||
|
||||
function cleanFile(file) {
|
||||
try {
|
||||
fs.unlinkSync(file);
|
||||
return [file];
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return [];
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Remove snapshot and report if they exist. Returns an array containing the
|
||||
// paths of the touched files.
|
||||
function cleanSnapshots({file, fixedLocation, projectDir}) {
|
||||
const {dir, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
|
||||
|
||||
return [
|
||||
...cleanFile(path.join(dir, snapFile)),
|
||||
...cleanFile(path.join(dir, reportFile))
|
||||
];
|
||||
}
|
||||
|
||||
exports.cleanSnapshots = cleanSnapshots;
|
||||
|
||||
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
|
||||
const {dir, relFile, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
|
||||
const snapPath = path.join(dir, snapFile);
|
||||
|
||||
let appendOnly = !updating;
|
||||
|
|
|
|||
65
node_modules/ava/lib/test.js
generated
vendored
65
node_modules/ava/lib/test.js
generated
vendored
|
|
@ -39,7 +39,9 @@ class ExecutionContext extends assert.Assertions {
|
|||
compareWithSnapshot: options => {
|
||||
return test.compareWithSnapshot(options);
|
||||
},
|
||||
powerAssert: test.powerAssert
|
||||
powerAssert: test.powerAssert,
|
||||
experiments: test.experiments,
|
||||
disableSnapshots: test.isHook === true
|
||||
});
|
||||
testMap.set(this, test);
|
||||
|
||||
|
|
@ -64,8 +66,8 @@ class ExecutionContext extends assert.Assertions {
|
|||
|
||||
this.plan.skip = () => {};
|
||||
|
||||
this.timeout = ms => {
|
||||
test.timeout(ms);
|
||||
this.timeout = (ms, message) => {
|
||||
test.timeout(ms, message);
|
||||
};
|
||||
|
||||
this.teardown = callback => {
|
||||
|
|
@ -73,6 +75,12 @@ class ExecutionContext extends assert.Assertions {
|
|||
};
|
||||
|
||||
this.try = async (...attemptArgs) => {
|
||||
if (test.isHook) {
|
||||
const error = new Error('`t.try()` can only be used in tests');
|
||||
test.saveFirstError(error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const {args, buildTitle, implementations, receivedImplementationArray} = parseTestArgs(attemptArgs);
|
||||
|
||||
if (implementations.length === 0) {
|
||||
|
|
@ -179,7 +187,8 @@ class ExecutionContext extends assert.Assertions {
|
|||
}
|
||||
|
||||
get passed() {
|
||||
return testMap.get(this).testPassed;
|
||||
const test = testMap.get(this);
|
||||
return test.isHook ? test.testPassed : !test.assertError;
|
||||
}
|
||||
|
||||
_throwsArgStart(assertion, file, line) {
|
||||
|
|
@ -221,7 +230,17 @@ class Test {
|
|||
const index = id ? 0 : this.nextSnapshotIndex++;
|
||||
const label = id ? '' : message || `Snapshot ${index + 1}`; // Human-readable labels start counting at 1.
|
||||
|
||||
const {record, ...result} = options.compareTestSnapshot({belongsTo, deferRecording, expected, index, label});
|
||||
const {taskIndex, associatedTaskIndex} = this.metadata;
|
||||
const {record, ...result} = options.compareTestSnapshot({
|
||||
belongsTo,
|
||||
deferRecording,
|
||||
expected,
|
||||
index,
|
||||
label,
|
||||
taskIndex,
|
||||
snapIndex: this.snapshotCount,
|
||||
associatedTaskIndex
|
||||
});
|
||||
if (record) {
|
||||
this.deferredSnapshotRecordings.push(record);
|
||||
}
|
||||
|
|
@ -230,6 +249,10 @@ class Test {
|
|||
};
|
||||
|
||||
this.skipSnapshot = () => {
|
||||
if (typeof options.skipSnapshot === 'function') {
|
||||
options.skipSnapshot();
|
||||
}
|
||||
|
||||
if (options.updateSnapshots) {
|
||||
this.addFailedAssertion(new Error('Snapshot assertions cannot be skipped when updating snapshots'));
|
||||
} else {
|
||||
|
|
@ -289,11 +312,8 @@ class Test {
|
|||
};
|
||||
}
|
||||
|
||||
if (this.metadata.inline) {
|
||||
throw new Error('`t.end()` is not supported inside `t.try()`');
|
||||
} else {
|
||||
throw new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
|
||||
}
|
||||
const error_ = this.metadata.inline ? new Error('`t.end()` is not supported inside `t.try()`') : new Error('`t.end()` is not supported in this context. To use `t.end()` as a callback, you must use "callback mode" via `test.cb(testName, fn)`');
|
||||
throw error_;
|
||||
}
|
||||
|
||||
endCallback(error, savedError) {
|
||||
|
|
@ -430,7 +450,14 @@ class Test {
|
|||
this.planError = planError;
|
||||
}
|
||||
|
||||
timeout(ms) {
|
||||
timeout(ms, message) {
|
||||
const result = assert.checkAssertionMessage('timeout', message);
|
||||
if (result !== true) {
|
||||
this.saveFirstError(result);
|
||||
// Allow the timeout to be set even when the message is invalid.
|
||||
message = '';
|
||||
}
|
||||
|
||||
if (this.finishing) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -438,7 +465,7 @@ class Test {
|
|||
this.clearTimeout();
|
||||
this.timeoutMs = ms;
|
||||
this.timeoutTimer = nowAndTimers.setTimeout(() => {
|
||||
this.saveFirstError(new Error('Test timeout exceeded'));
|
||||
this.saveFirstError(new Error(message || 'Test timeout exceeded'));
|
||||
|
||||
if (this.finishDueToTimeout) {
|
||||
this.finishDueToTimeout();
|
||||
|
|
@ -482,7 +509,13 @@ class Test {
|
|||
}
|
||||
|
||||
async runTeardowns() {
|
||||
for (const teardown of this.teardowns) {
|
||||
const teardowns = [...this.teardowns];
|
||||
|
||||
if (this.experiments.reverseTeardowns) {
|
||||
teardowns.reverse();
|
||||
}
|
||||
|
||||
for (const teardown of teardowns) {
|
||||
try {
|
||||
await teardown(); // eslint-disable-line no-await-in-loop
|
||||
} catch (error) {
|
||||
|
|
@ -714,11 +747,7 @@ class Test {
|
|||
if (this.metadata.failing) {
|
||||
passed = !passed;
|
||||
|
||||
if (passed) {
|
||||
error = null;
|
||||
} else {
|
||||
error = new Error('Test was expected to fail, but succeeded, you should stop marking the test as failing');
|
||||
}
|
||||
error = passed ? null : new Error('Test was expected to fail, but succeeded, you should stop marking the test as failing');
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
|
|||
203
node_modules/ava/lib/worker/ipc.js
generated
vendored
203
node_modules/ava/lib/worker/ipc.js
generated
vendored
|
|
@ -1,50 +1,42 @@
|
|||
'use strict';
|
||||
const Emittery = require('emittery');
|
||||
const events = require('events');
|
||||
const pEvent = require('p-event');
|
||||
const {controlFlow} = require('../ipc-flow-control');
|
||||
const {get: getOptions} = require('./options');
|
||||
|
||||
const emitter = new Emittery();
|
||||
process.on('message', message => {
|
||||
if (!message.ava) {
|
||||
return;
|
||||
}
|
||||
const selectAvaMessage = type => message => message.ava && message.ava.type === type;
|
||||
|
||||
switch (message.ava.type) {
|
||||
case 'options':
|
||||
emitter.emit('options', message.ava.options);
|
||||
break;
|
||||
case 'peer-failed':
|
||||
emitter.emit('peerFailed');
|
||||
break;
|
||||
case 'pong':
|
||||
emitter.emit('pong');
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
exports.options = emitter.once('options');
|
||||
exports.peerFailed = emitter.once('peerFailed');
|
||||
exports.options = pEvent(process, 'message', selectAvaMessage('options')).then(message => message.ava.options);
|
||||
exports.peerFailed = pEvent(process, 'message', selectAvaMessage('peer-failed'));
|
||||
|
||||
const bufferedSend = controlFlow(process);
|
||||
function send(evt) {
|
||||
if (process.connected) {
|
||||
process.send({ava: evt});
|
||||
}
|
||||
bufferedSend({ava: evt});
|
||||
}
|
||||
|
||||
exports.send = send;
|
||||
|
||||
let refs = 1;
|
||||
function ref() {
|
||||
if (++refs === 1) {
|
||||
process.channel.ref();
|
||||
}
|
||||
}
|
||||
|
||||
function unref() {
|
||||
process.channel.unref();
|
||||
if (refs > 0 && --refs === 0) {
|
||||
process.channel.unref();
|
||||
}
|
||||
}
|
||||
|
||||
exports.unref = unref;
|
||||
|
||||
let pendingPings = Promise.resolve();
|
||||
async function flush() {
|
||||
process.channel.ref();
|
||||
ref();
|
||||
const promise = pendingPings.then(async () => { // eslint-disable-line promise/prefer-await-to-then
|
||||
send({type: 'ping'});
|
||||
await emitter.once('pong');
|
||||
await pEvent(process, 'message', selectAvaMessage('pong'));
|
||||
if (promise === pendingPings) {
|
||||
unref();
|
||||
}
|
||||
|
|
@ -54,3 +46,156 @@ async function flush() {
|
|||
}
|
||||
|
||||
exports.flush = flush;
|
||||
|
||||
let channelCounter = 0;
|
||||
let messageCounter = 0;
|
||||
|
||||
const channelEmitters = new Map();
|
||||
function createChannelEmitter(channelId) {
|
||||
if (channelEmitters.size === 0) {
|
||||
process.on('message', message => {
|
||||
if (!message.ava) {
|
||||
return;
|
||||
}
|
||||
|
||||
const {channelId, type, ...payload} = message.ava;
|
||||
if (
|
||||
type === 'shared-worker-error' ||
|
||||
type === 'shared-worker-message' ||
|
||||
type === 'shared-worker-ready'
|
||||
) {
|
||||
const emitter = channelEmitters.get(channelId);
|
||||
if (emitter !== undefined) {
|
||||
emitter.emit(type, payload);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const emitter = new events.EventEmitter();
|
||||
channelEmitters.set(channelId, emitter);
|
||||
return [emitter, () => channelEmitters.delete(channelId)];
|
||||
}
|
||||
|
||||
function registerSharedWorker(filename, initialData) {
|
||||
const channelId = `${getOptions().forkId}/channel/${++channelCounter}`;
|
||||
const [channelEmitter, unsubscribe] = createChannelEmitter(channelId);
|
||||
|
||||
let forcedUnref = false;
|
||||
let refs = 0;
|
||||
const forceUnref = () => {
|
||||
if (forcedUnref) {
|
||||
return;
|
||||
}
|
||||
|
||||
forcedUnref = true;
|
||||
if (refs > 0) {
|
||||
unref();
|
||||
}
|
||||
};
|
||||
|
||||
const refChannel = () => {
|
||||
if (!forcedUnref && ++refs === 1) {
|
||||
ref();
|
||||
}
|
||||
};
|
||||
|
||||
const unrefChannel = () => {
|
||||
if (!forcedUnref && refs > 0 && --refs === 0) {
|
||||
unref();
|
||||
}
|
||||
};
|
||||
|
||||
send({
|
||||
type: 'shared-worker-connect',
|
||||
channelId,
|
||||
filename,
|
||||
initialData
|
||||
});
|
||||
|
||||
let currentlyAvailable = false;
|
||||
let error = null;
|
||||
|
||||
refChannel();
|
||||
const ready = pEvent(channelEmitter, 'shared-worker-ready').then(() => { // eslint-disable-line promise/prefer-await-to-then
|
||||
currentlyAvailable = error === null;
|
||||
}).finally(unrefChannel);
|
||||
|
||||
const messageEmitters = new Set();
|
||||
const handleMessage = message => {
|
||||
// Wait for a turn of the event loop, to allow new subscriptions to be set
|
||||
// up in response to the previous message.
|
||||
setImmediate(() => {
|
||||
for (const emitter of messageEmitters) {
|
||||
emitter.emit('message', message);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
channelEmitter.on('shared-worker-message', handleMessage);
|
||||
|
||||
pEvent(channelEmitter, 'shared-worker-error').then(() => { // eslint-disable-line promise/prefer-await-to-then
|
||||
unsubscribe();
|
||||
forceUnref();
|
||||
|
||||
error = new Error('The shared worker is no longer available');
|
||||
currentlyAvailable = false;
|
||||
for (const emitter of messageEmitters) {
|
||||
emitter.emit('error', error);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
forceUnref,
|
||||
ready,
|
||||
channel: {
|
||||
available: ready,
|
||||
|
||||
get currentlyAvailable() {
|
||||
return currentlyAvailable;
|
||||
},
|
||||
|
||||
async * receive() {
|
||||
if (error !== null) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const emitter = new events.EventEmitter();
|
||||
messageEmitters.add(emitter);
|
||||
try {
|
||||
refChannel();
|
||||
for await (const [message] of events.on(emitter, 'message')) {
|
||||
yield message;
|
||||
}
|
||||
} finally {
|
||||
unrefChannel();
|
||||
messageEmitters.delete(emitter);
|
||||
}
|
||||
},
|
||||
|
||||
post(serializedData, replyTo) {
|
||||
if (error !== null) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (!currentlyAvailable) {
|
||||
throw new Error('Shared worker is not yet available');
|
||||
}
|
||||
|
||||
const messageId = `${channelId}/message/${++messageCounter}`;
|
||||
send({
|
||||
type: 'shared-worker-message',
|
||||
channelId,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
|
||||
return messageId;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
exports.registerSharedWorker = registerSharedWorker;
|
||||
|
||||
|
|
|
|||
121
node_modules/ava/lib/worker/plugin.js
generated
vendored
Normal file
121
node_modules/ava/lib/worker/plugin.js
generated
vendored
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
const v8 = require('v8');
|
||||
const pkg = require('../../package.json');
|
||||
const subprocess = require('./subprocess');
|
||||
const options = require('./options');
|
||||
|
||||
const workers = new Map();
|
||||
const workerTeardownFns = new WeakMap();
|
||||
|
||||
function createSharedWorker(filename, initialData, teardown) {
|
||||
const channel = subprocess.registerSharedWorker(filename, initialData, teardown);
|
||||
|
||||
class ReceivedMessage {
|
||||
constructor(id, serializedData) {
|
||||
this.id = id;
|
||||
this.data = v8.deserialize(new Uint8Array(serializedData));
|
||||
}
|
||||
|
||||
reply(data) {
|
||||
return publishMessage(data, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that, no matter how often it's received, we have a stable message
|
||||
// object.
|
||||
const messageCache = new WeakMap();
|
||||
async function * receiveMessages(replyTo) {
|
||||
for await (const evt of channel.receive()) {
|
||||
if (replyTo === undefined && evt.replyTo !== undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (replyTo !== undefined && evt.replyTo !== replyTo) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let message = messageCache.get(evt);
|
||||
if (message === undefined) {
|
||||
message = new ReceivedMessage(evt.messageId, evt.serializedData);
|
||||
messageCache.set(evt, message);
|
||||
}
|
||||
|
||||
yield message;
|
||||
}
|
||||
}
|
||||
|
||||
function publishMessage(data, replyTo) {
|
||||
const id = channel.post([...v8.serialize(data)], replyTo);
|
||||
|
||||
return {
|
||||
id,
|
||||
async * replies() {
|
||||
yield * receiveMessages(id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
available: channel.available,
|
||||
protocol: 'experimental',
|
||||
|
||||
get currentlyAvailable() {
|
||||
return channel.currentlyAvailable;
|
||||
},
|
||||
|
||||
publish(data) {
|
||||
return publishMessage(data);
|
||||
},
|
||||
|
||||
async * subscribe() {
|
||||
yield * receiveMessages();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const supportsSharedWorkers = process.versions.node >= '12.17.0';
|
||||
|
||||
function registerSharedWorker({
|
||||
filename,
|
||||
initialData,
|
||||
supportedProtocols,
|
||||
teardown
|
||||
}) {
|
||||
if (!options.get().experiments.sharedWorkers) {
|
||||
throw new Error('Shared workers are experimental. Opt in to them in your AVA configuration');
|
||||
}
|
||||
|
||||
if (!supportsSharedWorkers) {
|
||||
throw new Error('Shared workers require Node.js 12.17 or newer');
|
||||
}
|
||||
|
||||
if (!supportedProtocols.includes('experimental')) {
|
||||
throw new Error(`This version of AVA (${pkg.version}) does not support any of the desired shared worker protocols: ${supportedProtocols.join()}`);
|
||||
}
|
||||
|
||||
let worker = workers.get(filename);
|
||||
if (worker === undefined) {
|
||||
worker = createSharedWorker(filename, initialData, async () => {
|
||||
// Run possibly asynchronous teardown functions serially, in reverse
|
||||
// order. Any error will crash the worker.
|
||||
const teardownFns = workerTeardownFns.get(worker);
|
||||
if (teardownFns !== undefined) {
|
||||
for await (const fn of [...teardownFns].reverse()) {
|
||||
await fn();
|
||||
}
|
||||
}
|
||||
});
|
||||
workers.set(filename, worker);
|
||||
}
|
||||
|
||||
if (teardown !== undefined) {
|
||||
if (workerTeardownFns.has(worker)) {
|
||||
workerTeardownFns.get(worker).push(teardown);
|
||||
} else {
|
||||
workerTeardownFns.set(worker, [teardown]);
|
||||
}
|
||||
}
|
||||
|
||||
return worker;
|
||||
}
|
||||
|
||||
exports.registerSharedWorker = registerSharedWorker;
|
||||
43
node_modules/ava/lib/worker/subprocess.js
generated
vendored
43
node_modules/ava/lib/worker/subprocess.js
generated
vendored
|
|
@ -32,6 +32,8 @@ ipc.options.then(async options => {
|
|||
const dependencyTracking = require('./dependency-tracker');
|
||||
const lineNumberSelection = require('./line-numbers');
|
||||
|
||||
const sharedWorkerTeardowns = [];
|
||||
|
||||
async function exit(code) {
|
||||
if (!process.exitCode) {
|
||||
process.exitCode = code;
|
||||
|
|
@ -89,10 +91,12 @@ ipc.options.then(async options => {
|
|||
exit(1);
|
||||
});
|
||||
|
||||
runner.on('finish', () => {
|
||||
runner.on('finish', async () => {
|
||||
try {
|
||||
const touchedFiles = runner.saveSnapshotState();
|
||||
if (touchedFiles) {
|
||||
const {cannotSave, touchedFiles} = runner.saveSnapshotState();
|
||||
if (cannotSave) {
|
||||
ipc.send({type: 'snapshot-error'});
|
||||
} else if (touchedFiles) {
|
||||
ipc.send({type: 'touched-files', files: touchedFiles});
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
@ -101,6 +105,14 @@ ipc.options.then(async options => {
|
|||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await Promise.all(sharedWorkerTeardowns.map(fn => fn()));
|
||||
} catch (error) {
|
||||
ipc.send({type: 'uncaught-exception', err: serializeError('Shared worker teardown error', false, error, runner.file)});
|
||||
exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
nowAndTimers.setImmediate(() => {
|
||||
currentlyUnhandled()
|
||||
.filter(rejection => !attributedRejections.has(rejection.promise))
|
||||
|
|
@ -127,6 +139,19 @@ ipc.options.then(async options => {
|
|||
return runner;
|
||||
};
|
||||
|
||||
exports.registerSharedWorker = (filename, initialData, teardown) => {
|
||||
const {channel, forceUnref, ready} = ipc.registerSharedWorker(filename, initialData);
|
||||
runner.waitForReady.push(ready);
|
||||
sharedWorkerTeardowns.push(async () => {
|
||||
try {
|
||||
await teardown();
|
||||
} finally {
|
||||
forceUnref();
|
||||
}
|
||||
});
|
||||
return channel;
|
||||
};
|
||||
|
||||
// Store value to prevent required modules from modifying it.
|
||||
const testPath = options.file;
|
||||
|
||||
|
|
@ -196,15 +221,21 @@ ipc.options.then(async options => {
|
|||
if (Reflect.has(mod, Symbol.for('esm:package'))) {
|
||||
requireFn = mod(module);
|
||||
}
|
||||
} catch (_) {}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Install dependency tracker after the require configuration has been evaluated
|
||||
// to make sure we also track dependencies with custom require hooks
|
||||
dependencyTracking.install(testPath);
|
||||
|
||||
if (options.debug) {
|
||||
require('inspector').open(options.debug.port, options.debug.host, true); // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
if (options.debug && options.debug.port !== undefined && options.debug.host !== undefined) {
|
||||
// If an inspector was active when the main process started, and is
|
||||
// already active for the worker process, do not open a new one.
|
||||
const inspector = require('inspector'); // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
if (!options.debug.active || inspector.url() === undefined) {
|
||||
inspector.open(options.debug.port, options.debug.host, true);
|
||||
}
|
||||
|
||||
if (options.debug.break) {
|
||||
debugger; // eslint-disable-line no-debugger
|
||||
}
|
||||
|
|
|
|||
1
node_modules/ava/node_modules/.bin/acorn
generated
vendored
Symbolic link
1
node_modules/ava/node_modules/.bin/acorn
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../acorn/bin/acorn
|
||||
1
node_modules/ava/node_modules/acorn-walk/dist/walk.js.map
generated
vendored
1
node_modules/ava/node_modules/acorn-walk/dist/walk.js.map
generated
vendored
File diff suppressed because one or more lines are too long
1
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs.map
generated
vendored
1
node_modules/ava/node_modules/acorn-walk/dist/walk.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
764
node_modules/ava/node_modules/acorn/CHANGELOG.md
generated
vendored
Normal file
764
node_modules/ava/node_modules/acorn/CHANGELOG.md
generated
vendored
Normal file
|
|
@ -0,0 +1,764 @@
|
|||
## 8.5.0 (2021-09-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve context-dependent tokenization in a number of corner cases.
|
||||
|
||||
Fix location tracking after a 0x2028 or 0x2029 character in a string literal (which before did not increase the line number).
|
||||
|
||||
Fix an issue where arrow function bodies in for loop context would inappropriately consume `in` operators.
|
||||
|
||||
Fix wrong end locations stored on SequenceExpression nodes.
|
||||
|
||||
Implement restriction that `for`/`of` loop LHS can't start with `let`.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class static blocks.
|
||||
|
||||
Allow multiple input files to be passed to the CLI tool.
|
||||
|
||||
## 8.4.1 (2021-06-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where `allowAwaitOutsideFunction` would allow `await` in class field initializers, and setting `ecmaVersion` to 13 or higher would allow top-level await in non-module sources.
|
||||
|
||||
## 8.4.0 (2021-06-11)
|
||||
|
||||
### New features
|
||||
|
||||
A new option, `allowSuperOutsideMethod`, can be used to suppress the error when `super` is used in the wrong context.
|
||||
|
||||
## 8.3.0 (2021-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 an higher.
|
||||
|
||||
Add support for the `p` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
|
||||
|
||||
## 8.2.4 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spec conformity in corner case 'for await (async of ...)'.
|
||||
|
||||
## 8.2.3 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix an issue where the library couldn't parse 'for (async of ...)'.
|
||||
|
||||
Fix a bug in UTF-16 decoding that would read characters incorrectly in some circumstances.
|
||||
|
||||
## 8.2.2 (2021-04-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where a class field initialized to an async arrow function wouldn't allow await inside it. Same issue existed for generator arrow functions with yield.
|
||||
|
||||
## 8.2.1 (2021-04-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a regression introduced in 8.2.0 where static or async class methods with keyword names fail to parse.
|
||||
|
||||
## 8.2.0 (2021-04-24)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class fields and private methods.
|
||||
|
||||
## 8.1.1 (2021-04-12)
|
||||
|
||||
### Various
|
||||
|
||||
Stop shipping source maps in the NPM package.
|
||||
|
||||
## 8.1.0 (2021-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a spurious error in nested destructuring arrays.
|
||||
|
||||
### New features
|
||||
|
||||
Expose `allowAwaitOutsideFunction` in CLI interface.
|
||||
|
||||
Make `allowImportExportAnywhere` also apply to `import.meta`.
|
||||
|
||||
## 8.0.5 (2021-01-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||
|
||||
## 8.0.4 (2020-10-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make `await x ** y` an error, following the spec.
|
||||
|
||||
Fix potentially exponential regular expression.
|
||||
|
||||
## 8.0.3 (2020-10-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a wasteful loop during `Parser` creation when setting `ecmaVersion` to `"latest"`.
|
||||
|
||||
## 8.0.2 (2020-09-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the TypeScript types reflect the current allowed values for `ecmaVersion`.
|
||||
|
||||
Fix another regexp/division tokenizer issue.
|
||||
|
||||
## 8.0.1 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Provide the correct value in the `version` export.
|
||||
|
||||
## 8.0.0 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow expressions like `(a = b) = c`.
|
||||
|
||||
Make non-octal escape sequences a syntax error in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||
|
||||
Update to the set of Unicode properties from ES2021.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The `ecmaVersion` option is now required. For the moment, omitting it will still work with a warning, but that will change in a future release.
|
||||
|
||||
Some changes to method signatures that may be used by plugins.
|
||||
|
||||
## 7.4.0 (2020-08-03)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for logical assignment operators.
|
||||
|
||||
Add support for numeric separators.
|
||||
|
||||
## 7.3.1 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the string in the `version` export match the actual library version.
|
||||
|
||||
## 7.3.0 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused parsing of object patterns with a property named `set` that had a default value to fail.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for optional chaining (`?.`).
|
||||
|
||||
## 7.2.0 (2020-05-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix precedence issue in parsing of async arrow functions.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for nullish coalescing.
|
||||
|
||||
Add support for `import.meta`.
|
||||
|
||||
Support `export * as ...` syntax.
|
||||
|
||||
Upgrade to Unicode 13.
|
||||
|
||||
## 6.4.1 (2020-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.1 (2020-03-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Treat `\8` and `\9` as invalid escapes in template strings.
|
||||
|
||||
Allow unicode escapes in property names that are keywords.
|
||||
|
||||
Don't error on an exponential operator expression as argument to `await`.
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.0 (2019-09-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow trailing object literal commas when ecmaVersion is less than 5.
|
||||
|
||||
### New features
|
||||
|
||||
Add a static `acorn` property to the `Parser` class that contains the entire module interface, to allow plugins to access the instance of the library that they are acting on.
|
||||
|
||||
## 7.0.0 (2019-08-13)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Changes the node format for dynamic imports to use the `ImportExpression` node type, as defined in [ESTree](https://github.com/estree/estree/blob/master/es2020.md#importexpression).
|
||||
|
||||
Makes 10 (ES2019) the default value for the `ecmaVersion` option.
|
||||
|
||||
## 6.3.0 (2019-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
`sourceType: "module"` can now be used even when `ecmaVersion` is less than 6, to parse module-style code that otherwise conforms to an older standard.
|
||||
|
||||
## 6.2.1 (2019-07-21)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug causing Acorn to treat some characters as identifier characters that shouldn't be treated as such.
|
||||
|
||||
Fix issue where setting the `allowReserved` option to `"never"` allowed reserved words in some circumstances.
|
||||
|
||||
## 6.2.0 (2019-07-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve valid assignment checking in `for`/`in` and `for`/`of` loops.
|
||||
|
||||
Disallow binding `let` in patterns.
|
||||
|
||||
### New features
|
||||
|
||||
Support bigint syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Support dynamic `import` syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Upgrade to Unicode version 12.
|
||||
|
||||
## 6.1.1 (2019-02-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug that caused parsing default exports of with names to fail.
|
||||
|
||||
## 6.1.0 (2019-02-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix scope checking when redefining a `var` as a lexical binding.
|
||||
|
||||
### New features
|
||||
|
||||
Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins.
|
||||
|
||||
## 6.0.7 (2019-02-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Check that exported bindings are defined.
|
||||
|
||||
Don't treat `\u180e` as a whitespace character.
|
||||
|
||||
Check for duplicate parameter names in methods.
|
||||
|
||||
Don't allow shorthand properties when they are generators or async methods.
|
||||
|
||||
Forbid binding `await` in async arrow function's parameter list.
|
||||
|
||||
## 6.0.6 (2019-01-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The content of class declarations and expressions is now always parsed in strict mode.
|
||||
|
||||
Don't allow `let` or `const` to bind the variable name `let`.
|
||||
|
||||
Treat class declarations as lexical.
|
||||
|
||||
Don't allow a generator function declaration as the sole body of an `if` or `else`.
|
||||
|
||||
Ignore `"use strict"` when after an empty statement.
|
||||
|
||||
Allow string line continuations with special line terminator characters.
|
||||
|
||||
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings.
|
||||
|
||||
Fix bug with parsing `yield` in a `for` loop initializer.
|
||||
|
||||
Implement special cases around scope checking for functions.
|
||||
|
||||
## 6.0.5 (2019-01-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type.
|
||||
|
||||
Don't treat `let` as a keyword when the next token is `{` on the next line.
|
||||
|
||||
Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on.
|
||||
|
||||
## 6.0.4 (2018-11-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Further improvements to tokenizing regular expressions in corner cases.
|
||||
|
||||
## 6.0.3 (2018-11-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression.
|
||||
|
||||
Remove stray symlink in the package tarball.
|
||||
|
||||
## 6.0.2 (2018-09-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug where default expressions could fail to parse inside an object destructuring assignment expression.
|
||||
|
||||
## 6.0.1 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix wrong value in `version` export.
|
||||
|
||||
## 6.0.0 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Better handle variable-redefinition checks for catch bindings and functions directly under if statements.
|
||||
|
||||
Forbid `new.target` in top-level arrow functions.
|
||||
|
||||
Fix issue with parsing a regexp after `yield` in some contexts.
|
||||
|
||||
### New features
|
||||
|
||||
The package now comes with TypeScript definitions.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 9 (2018).
|
||||
|
||||
Plugins work differently, and will have to be rewritten to work with this version.
|
||||
|
||||
The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`).
|
||||
|
||||
## 5.7.3 (2018-09-10)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix failure to tokenize regexps after expressions like `x.of`.
|
||||
|
||||
Better error message for unterminated template literals.
|
||||
|
||||
## 5.7.2 (2018-08-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly handle `allowAwaitOutsideFunction` in for statements.
|
||||
|
||||
Treat function declarations at the top level of modules like let bindings.
|
||||
|
||||
Don't allow async function declarations as the only statement under a label.
|
||||
|
||||
## 5.7.0 (2018-06-15)
|
||||
|
||||
### New features
|
||||
|
||||
Upgraded to Unicode 11.
|
||||
|
||||
## 5.6.0 (2018-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Allow U+2028 and U+2029 in string when ECMAVersion >= 10.
|
||||
|
||||
Allow binding-less catch statements when ECMAVersion >= 10.
|
||||
|
||||
Add `allowAwaitOutsideFunction` option for parsing top-level `await`.
|
||||
|
||||
## 5.5.3 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps.
|
||||
|
||||
## 5.5.2 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0.
|
||||
|
||||
## 5.5.1 (2018-03-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix misleading error message for octal escapes in template strings.
|
||||
|
||||
## 5.5.0 (2018-02-27)
|
||||
|
||||
### New features
|
||||
|
||||
The identifier character categorization is now based on Unicode version 10.
|
||||
|
||||
Acorn will now validate the content of regular expressions, including new ES9 features.
|
||||
|
||||
## 5.4.0 (2018-02-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow duplicate or escaped flags on regular expressions.
|
||||
|
||||
Disallow octal escapes in strings in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for async iteration.
|
||||
|
||||
Add support for object spread and rest.
|
||||
|
||||
## 5.3.0 (2017-12-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix parsing of floating point literals with leading zeroes in loose mode.
|
||||
|
||||
Allow duplicate property names in object patterns.
|
||||
|
||||
Don't allow static class methods named `prototype`.
|
||||
|
||||
Disallow async functions directly under `if` or `else`.
|
||||
|
||||
Parse right-hand-side of `for`/`of` as an assignment expression.
|
||||
|
||||
Stricter parsing of `for`/`in`.
|
||||
|
||||
Don't allow unicode escapes in contextual keywords.
|
||||
|
||||
### New features
|
||||
|
||||
Parsing class members was factored into smaller methods to allow plugins to hook into it.
|
||||
|
||||
## 5.2.1 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a token context corruption bug.
|
||||
|
||||
## 5.2.0 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix token context tracking for `class` and `function` in property-name position.
|
||||
|
||||
Make sure `%*` isn't parsed as a valid operator.
|
||||
|
||||
Allow shorthand properties `get` and `set` to be followed by default values.
|
||||
|
||||
Disallow `super` when not in callee or object position.
|
||||
|
||||
### New features
|
||||
|
||||
Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements.
|
||||
|
||||
## 5.1.2 (2017-09-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disable parsing of legacy HTML-style comments in modules.
|
||||
|
||||
Fix parsing of async methods whose names are keywords.
|
||||
|
||||
## 5.1.1 (2017-07-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix problem with disambiguating regexp and division after a class.
|
||||
|
||||
## 5.1.0 (2017-07-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix tokenizing of regexps in an object-desctructuring `for`/`of` loop and after `yield`.
|
||||
|
||||
Parse zero-prefixed numbers with non-octal digits as decimal.
|
||||
|
||||
Allow object/array patterns in rest parameters.
|
||||
|
||||
Don't error when `yield` is used as a property name.
|
||||
|
||||
Allow `async` as a shorthand object property.
|
||||
|
||||
### New features
|
||||
|
||||
Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9.
|
||||
|
||||
## 5.0.3 (2017-04-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spurious duplicate variable definition errors for named functions.
|
||||
|
||||
## 5.0.2 (2017-03-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error.
|
||||
|
||||
## 5.0.0 (2017-03-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Raise an error for duplicated lexical bindings.
|
||||
|
||||
Fix spurious error when an assignement expression occurred after a spread expression.
|
||||
|
||||
Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions.
|
||||
|
||||
Allow labels in front or `var` declarations, even in strict mode.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declarations nodes can now have `null` as their `id`.
|
||||
|
||||
## 4.0.11 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow all forms of member expressions to be parenthesized as lvalue.
|
||||
|
||||
## 4.0.10 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't expect semicolons after default-exported functions or classes, even when they are expressions.
|
||||
|
||||
Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode.
|
||||
|
||||
## 4.0.9 (2017-02-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again.
|
||||
|
||||
## 4.0.8 (2017-02-03)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet.
|
||||
|
||||
## 4.0.7 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Accept invalidly rejected code like `(x).y = 2` again.
|
||||
|
||||
Don't raise an error when a function _inside_ strict code has a non-simple parameter list.
|
||||
|
||||
## 4.0.6 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check.
|
||||
|
||||
## 4.0.5 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow parenthesized pattern expressions.
|
||||
|
||||
Allow keywords as export names.
|
||||
|
||||
Don't allow the `async` keyword to be parenthesized.
|
||||
|
||||
Properly raise an error when a keyword contains a character escape.
|
||||
|
||||
Allow `"use strict"` to appear after other string literal expressions.
|
||||
|
||||
Disallow labeled declarations.
|
||||
|
||||
## 4.0.4 (2016-12-19)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix crash when `export` was followed by a keyword that can't be
|
||||
exported.
|
||||
|
||||
## 4.0.3 (2016-08-16)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode.
|
||||
|
||||
Properly parse properties named `async` in ES2017 mode.
|
||||
|
||||
Fix bug where reserved words were broken in ES2017 mode.
|
||||
|
||||
## 4.0.2 (2016-08-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't ignore period or 'e' characters after octal numbers.
|
||||
|
||||
Fix broken parsing for call expressions in default parameter values of arrow functions.
|
||||
|
||||
## 4.0.1 (2016-08-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix false positives in duplicated export name errors.
|
||||
|
||||
## 4.0.0 (2016-08-07)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default `ecmaVersion` option value is now 7.
|
||||
|
||||
A number of internal method signatures changed, so plugins might need to be updated.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The parser now raises errors on duplicated export names.
|
||||
|
||||
`arguments` and `eval` can now be used in shorthand properties.
|
||||
|
||||
Duplicate parameter names in non-simple argument lists now always produce an error.
|
||||
|
||||
### New features
|
||||
|
||||
The `ecmaVersion` option now also accepts year-style version numbers
|
||||
(2015, etc).
|
||||
|
||||
Support for `async`/`await` syntax when `ecmaVersion` is >= 8.
|
||||
|
||||
Support for trailing commas in call expressions when `ecmaVersion` is >= 8.
|
||||
|
||||
## 3.3.0 (2016-07-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing of regexp operator after a function declaration.
|
||||
|
||||
Fix parser crash when parsing an array pattern with a hole.
|
||||
|
||||
### New features
|
||||
|
||||
Implement check against complex argument lists in functions that enable strict mode in ES7.
|
||||
|
||||
## 3.2.0 (2016-06-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve handling of lack of unicode regexp support in host
|
||||
environment.
|
||||
|
||||
Properly reject shorthand properties whose name is a keyword.
|
||||
|
||||
### New features
|
||||
|
||||
Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object.
|
||||
|
||||
## 3.1.0 (2016-04-18)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly tokenize the division operator directly after a function expression.
|
||||
|
||||
Allow trailing comma in destructuring arrays.
|
||||
|
||||
## 3.0.4 (2016-02-25)
|
||||
|
||||
### Fixes
|
||||
|
||||
Allow update expressions as left-hand-side of the ES7 exponential operator.
|
||||
|
||||
## 3.0.2 (2016-02-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix bug that accidentally made `undefined` a reserved word when parsing ES7.
|
||||
|
||||
## 3.0.0 (2016-02-10)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 6 (used to be 5).
|
||||
|
||||
Support for comprehension syntax (which was dropped from the draft spec) has been removed.
|
||||
|
||||
### Fixes
|
||||
|
||||
`let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code.
|
||||
|
||||
A parenthesized class or function expression after `export default` is now parsed correctly.
|
||||
|
||||
### New features
|
||||
|
||||
When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`).
|
||||
|
||||
The identifier character ranges are now based on Unicode 8.0.0.
|
||||
|
||||
Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled.
|
||||
|
||||
## 2.7.0 (2016-01-04)
|
||||
|
||||
### Fixes
|
||||
|
||||
Stop allowing rest parameters in setters.
|
||||
|
||||
Disallow `y` rexexp flag in ES5.
|
||||
|
||||
Disallow `\00` and `\000` escapes in strict mode.
|
||||
|
||||
Raise an error when an import name is a reserved word.
|
||||
|
||||
## 2.6.2 (2015-11-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Don't crash when no options object is passed.
|
||||
|
||||
## 2.6.0 (2015-11-09)
|
||||
|
||||
### Fixes
|
||||
|
||||
Add `await` as a reserved word in module sources.
|
||||
|
||||
Disallow `yield` in a parameter default value for a generator.
|
||||
|
||||
Forbid using a comma after a rest pattern in an array destructuring.
|
||||
|
||||
### New features
|
||||
|
||||
Support parsing stdin in command-line tool.
|
||||
|
||||
## 2.5.0 (2015-10-27)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix tokenizer support in the command-line tool.
|
||||
|
||||
Stop allowing `new.target` outside of functions.
|
||||
|
||||
Remove legacy `guard` and `guardedHandler` properties from try nodes.
|
||||
|
||||
Stop allowing multiple `__proto__` properties on an object literal in strict mode.
|
||||
|
||||
Don't allow rest parameters to be non-identifier patterns.
|
||||
|
||||
Check for duplicate paramter names in arrow functions.
|
||||
6
node_modules/isobject/LICENSE → node_modules/ava/node_modules/acorn/LICENSE
generated
vendored
6
node_modules/isobject/LICENSE → node_modules/ava/node_modules/acorn/LICENSE
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
The MIT License (MIT)
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
Copyright (C) 2012-2020 by various contributors (see AUTHORS)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
THE SOFTWARE.
|
||||
280
node_modules/ava/node_modules/acorn/README.md
generated
vendored
Normal file
280
node_modules/ava/node_modules/acorn/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
# Acorn
|
||||
|
||||
A tiny, fast JavaScript parser written in JavaScript.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn). For questions
|
||||
and discussion, please use the
|
||||
[Tern discussion forum](https://discuss.ternjs.net).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
**parse**`(input, options)` is the main interface to the library. The
|
||||
`input` parameter is a string, `options` must be an object setting
|
||||
some of the options listed below. The return value will be an abstract
|
||||
syntax tree object as specified by the [ESTree
|
||||
spec](https://github.com/estree/estree).
|
||||
|
||||
```javascript
|
||||
let acorn = require("acorn");
|
||||
console.log(acorn.parse("1 + 1", {ecmaVersion: 2020}));
|
||||
```
|
||||
|
||||
When encountering a syntax error, the parser will raise a
|
||||
`SyntaxError` object with a meaningful message. The error object will
|
||||
have a `pos` property that indicates the string offset at which the
|
||||
error occurred, and a `loc` object that contains a `{line, column}`
|
||||
object referring to that same position.
|
||||
|
||||
Options are provided by in a second argument, which should be an
|
||||
object containing any of these fields (only `ecmaVersion` is
|
||||
required):
|
||||
|
||||
- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be
|
||||
either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10 (2019),
|
||||
11 (2020), 12 (2021), 13 (2022, partial support)
|
||||
or `"latest"` (the latest the library supports). This influences
|
||||
support for strict mode, the set of reserved words, and support
|
||||
for new syntax features.
|
||||
|
||||
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
|
||||
implemented by Acorn. Other proposed new features must be
|
||||
implemented through plugins.
|
||||
|
||||
- **sourceType**: Indicate the mode the code should be parsed in. Can be
|
||||
either `"script"` or `"module"`. This influences global strict mode
|
||||
and parsing of `import` and `export` declarations.
|
||||
|
||||
**NOTE**: If set to `"module"`, then static `import` / `export` syntax
|
||||
will be valid, even if `ecmaVersion` is less than 6.
|
||||
|
||||
- **onInsertedSemicolon**: If given a callback, that callback will be
|
||||
called whenever a missing semicolon is inserted by the parser. The
|
||||
callback will be given the character offset of the point where the
|
||||
semicolon is inserted as argument, and if `locations` is on, also a
|
||||
`{line, column}` object representing this position.
|
||||
|
||||
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
|
||||
commas.
|
||||
|
||||
- **allowReserved**: If `false`, using a reserved word will generate
|
||||
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
|
||||
versions. When given the value `"never"`, reserved words and
|
||||
keywords can also not be used as property names (as in Internet
|
||||
Explorer's old parser).
|
||||
|
||||
- **allowReturnOutsideFunction**: By default, a return statement at
|
||||
the top level raises an error. Set this to `true` to accept such
|
||||
code.
|
||||
|
||||
- **allowImportExportEverywhere**: By default, `import` and `export`
|
||||
declarations can only appear at a program's top level. Setting this
|
||||
option to `true` allows them anywhere where a statement is allowed,
|
||||
and also allows `import.meta` expressions to appear in scripts
|
||||
(when `sourceType` is not `"module"`).
|
||||
|
||||
- **allowAwaitOutsideFunction**: If `false`, `await` expressions can
|
||||
only appear inside `async` functions. Defaults to `true` for
|
||||
`ecmaVersion` 2022 and later, `false` for lower versions. Setting this option to
|
||||
`true` allows to have top-level `await` expressions. They are
|
||||
still not allowed in non-`async` functions, though.
|
||||
|
||||
- **allowSuperOutsideMethod**: By default, `super` outside a method
|
||||
raises an error. Set this to `true` to accept such code.
|
||||
|
||||
- **allowHashBang**: When this is enabled (off by default), if the
|
||||
code starts with the characters `#!` (as in a shellscript), the
|
||||
first line will be treated as a comment.
|
||||
|
||||
- **locations**: When `true`, each node has a `loc` object attached
|
||||
with `start` and `end` subobjects, each of which contains the
|
||||
one-based line and zero-based column numbers in `{line, column}`
|
||||
form. Default is `false`.
|
||||
|
||||
- **onToken**: If a function is passed for this option, each found
|
||||
token will be passed in same format as tokens returned from
|
||||
`tokenizer().getToken()`.
|
||||
|
||||
If array is passed, each found token is pushed to it.
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **onComment**: If a function is passed for this option, whenever a
|
||||
comment is encountered the function will be called with the
|
||||
following parameters:
|
||||
|
||||
- `block`: `true` if the comment is a block comment, false if it
|
||||
is a line comment.
|
||||
- `text`: The content of the comment.
|
||||
- `start`: Character offset of the start of the comment.
|
||||
- `end`: Character offset of the end of the comment.
|
||||
|
||||
When the `locations` options is on, the `{line, column}` locations
|
||||
of the comment’s start and end are passed as two additional
|
||||
parameters.
|
||||
|
||||
If array is passed for this option, each found comment is pushed
|
||||
to it as object in Esprima format:
|
||||
|
||||
```javascript
|
||||
{
|
||||
"type": "Line" | "Block",
|
||||
"value": "comment text",
|
||||
"start": Number,
|
||||
"end": Number,
|
||||
// If `locations` option is on:
|
||||
"loc": {
|
||||
"start": {line: Number, column: Number}
|
||||
"end": {line: Number, column: Number}
|
||||
},
|
||||
// If `ranges` option is on:
|
||||
"range": [Number, Number]
|
||||
}
|
||||
```
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **ranges**: Nodes have their start and end characters offsets
|
||||
recorded in `start` and `end` properties (directly on the node,
|
||||
rather than the `loc` object, which holds line/column data. To also
|
||||
add a
|
||||
[semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
|
||||
`range` property holding a `[start, end]` array with the same
|
||||
numbers, set the `ranges` option to `true`.
|
||||
|
||||
- **program**: It is possible to parse multiple files into a single
|
||||
AST by passing the tree produced by parsing the first file as the
|
||||
`program` option in subsequent parses. This will add the toplevel
|
||||
forms of the parsed file to the "Program" (top) node of an existing
|
||||
parse tree.
|
||||
|
||||
- **sourceFile**: When the `locations` option is `true`, you can pass
|
||||
this option to add a `source` attribute in every node’s `loc`
|
||||
object. Note that the contents of this option are not examined or
|
||||
processed in any way; you are free to use whatever format you
|
||||
choose.
|
||||
|
||||
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
|
||||
will be added (regardless of the `location` option) directly to the
|
||||
nodes, rather than the `loc` object.
|
||||
|
||||
- **preserveParens**: If this option is `true`, parenthesized expressions
|
||||
are represented by (non-standard) `ParenthesizedExpression` nodes
|
||||
that have a single `expression` property containing the expression
|
||||
inside parentheses.
|
||||
|
||||
**parseExpressionAt**`(input, offset, options)` will parse a single
|
||||
expression in a string, and return its AST. It will not complain if
|
||||
there is more of the string left after the expression.
|
||||
|
||||
**tokenizer**`(input, options)` returns an object with a `getToken`
|
||||
method that can be called repeatedly to get the next token, a `{start,
|
||||
end, type, value}` object (with added `loc` property when the
|
||||
`locations` option is enabled and `range` property when the `ranges`
|
||||
option is enabled). When the token's type is `tokTypes.eof`, you
|
||||
should stop calling the method, since it will keep returning that same
|
||||
token forever.
|
||||
|
||||
In ES6 environment, returned result can be used as any other
|
||||
protocol-compliant iterable:
|
||||
|
||||
```javascript
|
||||
for (let token of acorn.tokenizer(str)) {
|
||||
// iterate over the tokens
|
||||
}
|
||||
|
||||
// transform code to array of tokens:
|
||||
var tokens = [...acorn.tokenizer(str)];
|
||||
```
|
||||
|
||||
**tokTypes** holds an object mapping names to the token type objects
|
||||
that end up in the `type` properties of tokens.
|
||||
|
||||
**getLineInfo**`(input, offset)` can be used to get a `{line,
|
||||
column}` object for a given program string and offset.
|
||||
|
||||
### The `Parser` class
|
||||
|
||||
Instances of the **`Parser`** class contain all the state and logic
|
||||
that drives a parse. It has static methods `parse`,
|
||||
`parseExpressionAt`, and `tokenizer` that match the top-level
|
||||
functions by the same name.
|
||||
|
||||
When extending the parser with plugins, you need to call these methods
|
||||
on the extended version of the class. To extend a parser with plugins,
|
||||
you can use its static `extend` method.
|
||||
|
||||
```javascript
|
||||
var acorn = require("acorn");
|
||||
var jsx = require("acorn-jsx");
|
||||
var JSXParser = acorn.Parser.extend(jsx());
|
||||
JSXParser.parse("foo(<bar/>)", {ecmaVersion: 2020});
|
||||
```
|
||||
|
||||
The `extend` method takes any number of plugin values, and returns a
|
||||
new `Parser` class that includes the extra parser logic provided by
|
||||
the plugins.
|
||||
|
||||
## Command line interface
|
||||
|
||||
The `bin/acorn` utility can be used to parse a file from the command
|
||||
line. It accepts as arguments its input file and the following
|
||||
options:
|
||||
|
||||
- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
|
||||
to parse. Default is version 9.
|
||||
|
||||
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
|
||||
|
||||
- `--locations`: Attaches a "loc" object to each node with "start" and
|
||||
"end" subobjects, each of which contains the one-based line and
|
||||
zero-based column numbers in `{line, column}` form.
|
||||
|
||||
- `--allow-hash-bang`: If the code starts with the characters #! (as
|
||||
in a shellscript), the first line will be treated as a comment.
|
||||
|
||||
- `--allow-await-outside-function`: Allows top-level `await` expressions.
|
||||
See the `allowAwaitOutsideFunction` option for more information.
|
||||
|
||||
- `--compact`: No whitespace is used in the AST output.
|
||||
|
||||
- `--silent`: Do not output the AST, just return the exit status.
|
||||
|
||||
- `--help`: Print the usage information and quit.
|
||||
|
||||
The utility spits out the syntax tree as JSON data.
|
||||
|
||||
## Existing plugins
|
||||
|
||||
- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)
|
||||
|
||||
Plugins for ECMAScript proposals:
|
||||
|
||||
- [`acorn-stage3`](https://github.com/acornjs/acorn-stage3): Parse most stage 3 proposals, bundling:
|
||||
- [`acorn-class-fields`](https://github.com/acornjs/acorn-class-fields): Parse [class fields proposal](https://github.com/tc39/proposal-class-fields)
|
||||
- [`acorn-import-meta`](https://github.com/acornjs/acorn-import-meta): Parse [import.meta proposal](https://github.com/tc39/proposal-import-meta)
|
||||
- [`acorn-private-methods`](https://github.com/acornjs/acorn-private-methods): parse [private methods, getters and setters proposal](https://github.com/tc39/proposal-private-methods)n
|
||||
4
node_modules/ava/node_modules/acorn/bin/acorn
generated
vendored
Executable file
4
node_modules/ava/node_modules/acorn/bin/acorn
generated
vendored
Executable file
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
|
||||
require('../dist/bin.js');
|
||||
214
node_modules/ava/node_modules/acorn/dist/acorn.d.ts
generated
vendored
Normal file
214
node_modules/ava/node_modules/acorn/dist/acorn.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,214 @@
|
|||
export as namespace acorn
|
||||
export = acorn
|
||||
|
||||
declare namespace acorn {
|
||||
function parse(input: string, options: Options): Node
|
||||
|
||||
function parseExpressionAt(input: string, pos: number, options: Options): Node
|
||||
|
||||
function tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
|
||||
interface Options {
|
||||
ecmaVersion: 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 'latest'
|
||||
sourceType?: 'script' | 'module'
|
||||
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
allowReserved?: boolean | 'never'
|
||||
allowReturnOutsideFunction?: boolean
|
||||
allowImportExportEverywhere?: boolean
|
||||
allowAwaitOutsideFunction?: boolean
|
||||
allowSuperOutsideMethod?: boolean
|
||||
allowHashBang?: boolean
|
||||
locations?: boolean
|
||||
onToken?: ((token: Token) => any) | Token[]
|
||||
onComment?: ((
|
||||
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
|
||||
endLoc?: Position
|
||||
) => void) | Comment[]
|
||||
ranges?: boolean
|
||||
program?: Node
|
||||
sourceFile?: string
|
||||
directSourceFile?: string
|
||||
preserveParens?: boolean
|
||||
}
|
||||
|
||||
class Parser {
|
||||
constructor(options: Options, input: string, startPos?: number)
|
||||
parse(this: Parser): Node
|
||||
static parse(this: typeof Parser, input: string, options: Options): Node
|
||||
static parseExpressionAt(this: typeof Parser, input: string, pos: number, options: Options): Node
|
||||
static tokenizer(this: typeof Parser, input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
static extend(this: typeof Parser, ...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
|
||||
}
|
||||
|
||||
interface Position { line: number; column: number; offset: number }
|
||||
|
||||
const defaultOptions: Options
|
||||
|
||||
function getLineInfo(input: string, offset: number): Position
|
||||
|
||||
class SourceLocation {
|
||||
start: Position
|
||||
end: Position
|
||||
source?: string | null
|
||||
constructor(p: Parser, start: Position, end: Position)
|
||||
}
|
||||
|
||||
class Node {
|
||||
type: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
sourceFile?: string
|
||||
range?: [number, number]
|
||||
constructor(parser: Parser, pos: number, loc?: SourceLocation)
|
||||
}
|
||||
|
||||
class TokenType {
|
||||
label: string
|
||||
keyword: string
|
||||
beforeExpr: boolean
|
||||
startsExpr: boolean
|
||||
isLoop: boolean
|
||||
isAssign: boolean
|
||||
prefix: boolean
|
||||
postfix: boolean
|
||||
binop: number
|
||||
updateContext?: (prevType: TokenType) => void
|
||||
constructor(label: string, conf?: any)
|
||||
}
|
||||
|
||||
const tokTypes: {
|
||||
num: TokenType
|
||||
regexp: TokenType
|
||||
string: TokenType
|
||||
name: TokenType
|
||||
privateId: TokenType
|
||||
eof: TokenType
|
||||
bracketL: TokenType
|
||||
bracketR: TokenType
|
||||
braceL: TokenType
|
||||
braceR: TokenType
|
||||
parenL: TokenType
|
||||
parenR: TokenType
|
||||
comma: TokenType
|
||||
semi: TokenType
|
||||
colon: TokenType
|
||||
dot: TokenType
|
||||
question: TokenType
|
||||
arrow: TokenType
|
||||
template: TokenType
|
||||
ellipsis: TokenType
|
||||
backQuote: TokenType
|
||||
dollarBraceL: TokenType
|
||||
eq: TokenType
|
||||
assign: TokenType
|
||||
incDec: TokenType
|
||||
prefix: TokenType
|
||||
logicalOR: TokenType
|
||||
logicalAND: TokenType
|
||||
bitwiseOR: TokenType
|
||||
bitwiseXOR: TokenType
|
||||
bitwiseAND: TokenType
|
||||
equality: TokenType
|
||||
relational: TokenType
|
||||
bitShift: TokenType
|
||||
plusMin: TokenType
|
||||
modulo: TokenType
|
||||
star: TokenType
|
||||
slash: TokenType
|
||||
starstar: TokenType
|
||||
_break: TokenType
|
||||
_case: TokenType
|
||||
_catch: TokenType
|
||||
_continue: TokenType
|
||||
_debugger: TokenType
|
||||
_default: TokenType
|
||||
_do: TokenType
|
||||
_else: TokenType
|
||||
_finally: TokenType
|
||||
_for: TokenType
|
||||
_function: TokenType
|
||||
_if: TokenType
|
||||
_return: TokenType
|
||||
_switch: TokenType
|
||||
_throw: TokenType
|
||||
_try: TokenType
|
||||
_var: TokenType
|
||||
_const: TokenType
|
||||
_while: TokenType
|
||||
_with: TokenType
|
||||
_new: TokenType
|
||||
_this: TokenType
|
||||
_super: TokenType
|
||||
_class: TokenType
|
||||
_extends: TokenType
|
||||
_export: TokenType
|
||||
_import: TokenType
|
||||
_null: TokenType
|
||||
_true: TokenType
|
||||
_false: TokenType
|
||||
_in: TokenType
|
||||
_instanceof: TokenType
|
||||
_typeof: TokenType
|
||||
_void: TokenType
|
||||
_delete: TokenType
|
||||
}
|
||||
|
||||
class TokContext {
|
||||
constructor(token: string, isExpr: boolean, preserveSpace: boolean, override?: (p: Parser) => void)
|
||||
}
|
||||
|
||||
const tokContexts: {
|
||||
b_stat: TokContext
|
||||
b_expr: TokContext
|
||||
b_tmpl: TokContext
|
||||
p_stat: TokContext
|
||||
p_expr: TokContext
|
||||
q_tmpl: TokContext
|
||||
f_expr: TokContext
|
||||
f_stat: TokContext
|
||||
f_expr_gen: TokContext
|
||||
f_gen: TokContext
|
||||
}
|
||||
|
||||
function isIdentifierStart(code: number, astral?: boolean): boolean
|
||||
|
||||
function isIdentifierChar(code: number, astral?: boolean): boolean
|
||||
|
||||
interface AbstractToken {
|
||||
}
|
||||
|
||||
interface Comment extends AbstractToken {
|
||||
type: string
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
class Token {
|
||||
type: TokenType
|
||||
value: any
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
constructor(p: Parser)
|
||||
}
|
||||
|
||||
function isNewLine(code: number): boolean
|
||||
|
||||
const lineBreak: RegExp
|
||||
|
||||
const lineBreakG: RegExp
|
||||
|
||||
const version: string
|
||||
}
|
||||
5572
node_modules/ava/node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
5572
node_modules/ava/node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
5541
node_modules/ava/node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
5541
node_modules/ava/node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
2
node_modules/ava/node_modules/acorn/dist/acorn.mjs.d.ts
generated
vendored
Normal file
2
node_modules/ava/node_modules/acorn/dist/acorn.mjs.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
import * as acorn from "./acorn";
|
||||
export = acorn;
|
||||
71
node_modules/ava/node_modules/acorn/dist/bin.js
generated
vendored
Normal file
71
node_modules/ava/node_modules/acorn/dist/bin.js
generated
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var acorn = require('./acorn.js');
|
||||
|
||||
var inputFilePaths = [], forceFileName = false, fileMode = false, silent = false, compact = false, tokenize = false;
|
||||
var options = {};
|
||||
|
||||
function help(status) {
|
||||
var print = (status === 0) ? console.log : console.error;
|
||||
print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
|
||||
print(" [--tokenize] [--locations] [---allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
|
||||
process.exit(status);
|
||||
}
|
||||
|
||||
for (var i = 2; i < process.argv.length; ++i) {
|
||||
var arg = process.argv[i];
|
||||
if (arg[0] !== "-" || arg === "-") { inputFilePaths.push(arg); }
|
||||
else if (arg === "--") {
|
||||
inputFilePaths.push.apply(inputFilePaths, process.argv.slice(i + 1));
|
||||
forceFileName = true;
|
||||
break
|
||||
} else if (arg === "--locations") { options.locations = true; }
|
||||
else if (arg === "--allow-hash-bang") { options.allowHashBang = true; }
|
||||
else if (arg === "--allow-await-outside-function") { options.allowAwaitOutsideFunction = true; }
|
||||
else if (arg === "--silent") { silent = true; }
|
||||
else if (arg === "--compact") { compact = true; }
|
||||
else if (arg === "--help") { help(0); }
|
||||
else if (arg === "--tokenize") { tokenize = true; }
|
||||
else if (arg === "--module") { options.sourceType = "module"; }
|
||||
else {
|
||||
var match = arg.match(/^--ecma(\d+)$/);
|
||||
if (match)
|
||||
{ options.ecmaVersion = +match[1]; }
|
||||
else
|
||||
{ help(1); }
|
||||
}
|
||||
}
|
||||
|
||||
function run(codeList) {
|
||||
var result = [], fileIdx = 0;
|
||||
try {
|
||||
codeList.forEach(function (code, idx) {
|
||||
fileIdx = idx;
|
||||
if (!tokenize) {
|
||||
result = acorn.parse(code, options);
|
||||
options.program = result;
|
||||
} else {
|
||||
var tokenizer = acorn.tokenizer(code, options), token;
|
||||
do {
|
||||
token = tokenizer.getToken();
|
||||
result.push(token);
|
||||
} while (token.type !== acorn.tokTypes.eof)
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(fileMode ? e.message.replace(/\(\d+:\d+\)$/, function (m) { return m.slice(0, 1) + inputFilePaths[fileIdx] + " " + m.slice(1); }) : e.message);
|
||||
process.exit(1);
|
||||
}
|
||||
if (!silent) { console.log(JSON.stringify(result, null, compact ? null : 2)); }
|
||||
}
|
||||
|
||||
if (fileMode = inputFilePaths.length && (forceFileName || !inputFilePaths.includes("-") || inputFilePaths.length !== 1)) {
|
||||
run(inputFilePaths.map(function (path) { return fs.readFileSync(path, "utf8"); }));
|
||||
} else {
|
||||
var code = "";
|
||||
process.stdin.resume();
|
||||
process.stdin.on("data", function (chunk) { return code += chunk; });
|
||||
process.stdin.on("end", function () { return run([code]); });
|
||||
}
|
||||
50
node_modules/ava/node_modules/acorn/package.json
generated
vendored
Normal file
50
node_modules/ava/node_modules/acorn/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
{
|
||||
"name": "acorn",
|
||||
"description": "ECMAScript parser",
|
||||
"homepage": "https://github.com/acornjs/acorn",
|
||||
"main": "dist/acorn.js",
|
||||
"types": "dist/acorn.d.ts",
|
||||
"module": "dist/acorn.mjs",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": "./dist/acorn.mjs",
|
||||
"require": "./dist/acorn.js",
|
||||
"default": "./dist/acorn.js"
|
||||
},
|
||||
"./dist/acorn.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"version": "8.5.0",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "Marijn Haverbeke",
|
||||
"email": "marijnh@gmail.com",
|
||||
"web": "https://marijnhaverbeke.nl"
|
||||
},
|
||||
{
|
||||
"name": "Ingvar Stepanyan",
|
||||
"email": "me@rreverser.com",
|
||||
"web": "https://rreverser.com/"
|
||||
},
|
||||
{
|
||||
"name": "Adrian Heine",
|
||||
"web": "http://adrianheine.de"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/acornjs/acorn.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"prepare": "cd ..; npm run build:main && npm run build:bin"
|
||||
},
|
||||
"bin": {
|
||||
"acorn": "./bin/acorn"
|
||||
}
|
||||
}
|
||||
104
node_modules/ava/node_modules/ansi-styles/index.d.ts
generated
vendored
104
node_modules/ava/node_modules/ansi-styles/index.d.ts
generated
vendored
|
|
@ -1,66 +1,4 @@
|
|||
import * as cssColors from 'color-name';
|
||||
|
||||
declare namespace ansiStyles {
|
||||
interface ColorConvert {
|
||||
/**
|
||||
The RGB color space.
|
||||
|
||||
@param red - (`0`-`255`)
|
||||
@param green - (`0`-`255`)
|
||||
@param blue - (`0`-`255`)
|
||||
*/
|
||||
rgb(red: number, green: number, blue: number): string;
|
||||
|
||||
/**
|
||||
The RGB HEX color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hex(hex: string): string;
|
||||
|
||||
/**
|
||||
@param keyword - A CSS color name.
|
||||
*/
|
||||
keyword(keyword: keyof typeof cssColors): string;
|
||||
|
||||
/**
|
||||
The HSL color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param lightness - (`0`-`100`)
|
||||
*/
|
||||
hsl(hue: number, saturation: number, lightness: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param saturation - (`0`-`100`)
|
||||
@param value - (`0`-`100`)
|
||||
*/
|
||||
hsv(hue: number, saturation: number, value: number): string;
|
||||
|
||||
/**
|
||||
The HSV color space.
|
||||
|
||||
@param hue - (`0`-`360`)
|
||||
@param whiteness - (`0`-`100`)
|
||||
@param blackness - (`0`-`100`)
|
||||
*/
|
||||
hwb(hue: number, whiteness: number, blackness: number): string;
|
||||
|
||||
/**
|
||||
Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color.
|
||||
*/
|
||||
ansi(ansi: number): string;
|
||||
|
||||
/**
|
||||
Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
|
||||
*/
|
||||
ansi256(ansi: number): string;
|
||||
}
|
||||
|
||||
interface CSPair {
|
||||
/**
|
||||
The ANSI terminal control sequence for starting this style.
|
||||
|
|
@ -74,14 +12,14 @@ declare namespace ansiStyles {
|
|||
}
|
||||
|
||||
interface ColorBase {
|
||||
readonly ansi: ColorConvert;
|
||||
readonly ansi256: ColorConvert;
|
||||
readonly ansi16m: ColorConvert;
|
||||
|
||||
/**
|
||||
The ANSI terminal control sequence for ending this color.
|
||||
*/
|
||||
readonly close: string;
|
||||
|
||||
ansi256(code: number): string;
|
||||
|
||||
ansi16m(red: number, green: number, blue: number): string;
|
||||
}
|
||||
|
||||
interface Modifier {
|
||||
|
|
@ -110,6 +48,13 @@ declare namespace ansiStyles {
|
|||
*/
|
||||
readonly underline: CSPair;
|
||||
|
||||
/**
|
||||
Make text overline.
|
||||
|
||||
Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
|
||||
*/
|
||||
readonly overline: CSPair;
|
||||
|
||||
/**
|
||||
Inverse background and foreground colors.
|
||||
*/
|
||||
|
|
@ -185,6 +130,31 @@ declare namespace ansiStyles {
|
|||
readonly bgMagentaBright: CSPair;
|
||||
readonly bgWhiteBright: CSPair;
|
||||
}
|
||||
|
||||
interface ConvertColor {
|
||||
/**
|
||||
Convert from the RGB color space to the ANSI 256 color space.
|
||||
|
||||
@param red - (`0...255`)
|
||||
@param green - (`0...255`)
|
||||
@param blue - (`0...255`)
|
||||
*/
|
||||
rgbToAnsi256(red: number, green: number, blue: number): number;
|
||||
|
||||
/**
|
||||
Convert from the RGB HEX color space to the RGB color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hexToRgb(hex: string): [red: number, green: number, blue: number];
|
||||
|
||||
/**
|
||||
Convert from the RGB HEX color space to the ANSI 256 color space.
|
||||
|
||||
@param hex - A hexadecimal string containing RGB data.
|
||||
*/
|
||||
hexToAnsi256(hex: string): number;
|
||||
}
|
||||
}
|
||||
|
||||
declare const ansiStyles: {
|
||||
|
|
@ -192,6 +162,6 @@ declare const ansiStyles: {
|
|||
readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase;
|
||||
readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase;
|
||||
readonly codes: ReadonlyMap<number, number>;
|
||||
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier;
|
||||
} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier & ansiStyles.ConvertColor;
|
||||
|
||||
export = ansiStyles;
|
||||
|
|
|
|||
123
node_modules/ava/node_modules/ansi-styles/index.js
generated
vendored
123
node_modules/ava/node_modules/ansi-styles/index.js
generated
vendored
|
|
@ -1,62 +1,10 @@
|
|||
'use strict';
|
||||
|
||||
const wrapAnsi16 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${code + offset}m`;
|
||||
};
|
||||
const ANSI_BACKGROUND_OFFSET = 10;
|
||||
|
||||
const wrapAnsi256 = (fn, offset) => (...args) => {
|
||||
const code = fn(...args);
|
||||
return `\u001B[${38 + offset};5;${code}m`;
|
||||
};
|
||||
const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`;
|
||||
|
||||
const wrapAnsi16m = (fn, offset) => (...args) => {
|
||||
const rgb = fn(...args);
|
||||
return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`;
|
||||
};
|
||||
|
||||
const ansi2ansi = n => n;
|
||||
const rgb2rgb = (r, g, b) => [r, g, b];
|
||||
|
||||
const setLazyProperty = (object, property, get) => {
|
||||
Object.defineProperty(object, property, {
|
||||
get: () => {
|
||||
const value = get();
|
||||
|
||||
Object.defineProperty(object, property, {
|
||||
value,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
return value;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
};
|
||||
|
||||
/** @type {typeof import('color-convert')} */
|
||||
let colorConvert;
|
||||
const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => {
|
||||
if (colorConvert === undefined) {
|
||||
colorConvert = require('color-convert');
|
||||
}
|
||||
|
||||
const offset = isBackground ? 10 : 0;
|
||||
const styles = {};
|
||||
|
||||
for (const [sourceSpace, suite] of Object.entries(colorConvert)) {
|
||||
const name = sourceSpace === 'ansi16' ? 'ansi' : sourceSpace;
|
||||
if (sourceSpace === targetSpace) {
|
||||
styles[name] = wrap(identity, offset);
|
||||
} else if (typeof suite === 'object') {
|
||||
styles[name] = wrap(suite[targetSpace], offset);
|
||||
}
|
||||
}
|
||||
|
||||
return styles;
|
||||
};
|
||||
const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`;
|
||||
|
||||
function assembleStyles() {
|
||||
const codes = new Map();
|
||||
|
|
@ -68,6 +16,7 @@ function assembleStyles() {
|
|||
dim: [2, 22],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
overline: [53, 55],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29]
|
||||
|
|
@ -146,12 +95,64 @@ function assembleStyles() {
|
|||
styles.color.close = '\u001B[39m';
|
||||
styles.bgColor.close = '\u001B[49m';
|
||||
|
||||
setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false));
|
||||
setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false));
|
||||
setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true));
|
||||
setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true));
|
||||
styles.color.ansi256 = wrapAnsi256();
|
||||
styles.color.ansi16m = wrapAnsi16m();
|
||||
styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
|
||||
styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
|
||||
|
||||
// From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
|
||||
Object.defineProperties(styles, {
|
||||
rgbToAnsi256: {
|
||||
value: (red, green, blue) => {
|
||||
// We use the extended greyscale palette here, with the exception of
|
||||
// black and white. normal palette only has 4 greyscale shades.
|
||||
if (red === green && green === blue) {
|
||||
if (red < 8) {
|
||||
return 16;
|
||||
}
|
||||
|
||||
if (red > 248) {
|
||||
return 231;
|
||||
}
|
||||
|
||||
return Math.round(((red - 8) / 247) * 24) + 232;
|
||||
}
|
||||
|
||||
return 16 +
|
||||
(36 * Math.round(red / 255 * 5)) +
|
||||
(6 * Math.round(green / 255 * 5)) +
|
||||
Math.round(blue / 255 * 5);
|
||||
},
|
||||
enumerable: false
|
||||
},
|
||||
hexToRgb: {
|
||||
value: hex => {
|
||||
const matches = /(?<colorString>[a-f\d]{6}|[a-f\d]{3})/i.exec(hex.toString(16));
|
||||
if (!matches) {
|
||||
return [0, 0, 0];
|
||||
}
|
||||
|
||||
let {colorString} = matches.groups;
|
||||
|
||||
if (colorString.length === 3) {
|
||||
colorString = colorString.split('').map(character => character + character).join('');
|
||||
}
|
||||
|
||||
const integer = Number.parseInt(colorString, 16);
|
||||
|
||||
return [
|
||||
(integer >> 16) & 0xFF,
|
||||
(integer >> 8) & 0xFF,
|
||||
integer & 0xFF
|
||||
];
|
||||
},
|
||||
enumerable: false
|
||||
},
|
||||
hexToAnsi256: {
|
||||
value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
|
||||
enumerable: false
|
||||
}
|
||||
});
|
||||
|
||||
return styles;
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue