Merge pull request #913 from github/update-v1.0.32-4eb03fb6
Merge main into v1
This commit is contained in:
commit
2b46439dd5
1216 changed files with 32178 additions and 97341 deletions
|
|
@ -10,7 +10,8 @@
|
|||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:@typescript-eslint/recommended-requiring-type-checking",
|
||||
"plugin:github/recommended",
|
||||
"plugin:github/typescript"
|
||||
"plugin:github/typescript",
|
||||
"plugin:import/typescript"
|
||||
],
|
||||
"rules": {
|
||||
"filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
|
||||
|
|
|
|||
63
.github/workflows/__analyze-ref-input.yml
generated
vendored
Normal file
63
.github/workflows/__analyze-ref-input.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
analyze-ref-input:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20201028
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os:
|
||||
- ubuntu-latest
|
||||
- macos-latest
|
||||
- windows-latest
|
||||
name: "Analyze: 'ref' and 'sha' from inputs"
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
2
.github/workflows/__remote-config.yml
generated
vendored
2
.github/workflows/__remote-config.yml
generated
vendored
|
|
@ -48,7 +48,7 @@ jobs:
|
|||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
|
|
|
|||
70
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
Normal file
70
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pip install ruamel.yaml && python3 sync.py
|
||||
# to regenerate this file.
|
||||
|
||||
name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v1
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
upload-ref-sha-input:
|
||||
strategy:
|
||||
matrix:
|
||||
version:
|
||||
- stable-20201028
|
||||
- stable-20210319
|
||||
- stable-20210809
|
||||
- cached
|
||||
- latest
|
||||
- nightly-latest
|
||||
os:
|
||||
- ubuntu-latest
|
||||
- macos-latest
|
||||
- windows-latest
|
||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v2
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: cpp,csharp,java,javascript,python
|
||||
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
|
||||
github.sha }}
|
||||
- name: Build code
|
||||
shell: bash
|
||||
run: ./build.sh
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: false
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- uses: ./../action/upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
env:
|
||||
TEST_MODE: true
|
||||
env:
|
||||
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -1,2 +1,4 @@
|
|||
/runner/dist/
|
||||
/runner/node_modules/
|
||||
# Ignore for example failing-tests.json from AVA
|
||||
node_modules/.cache
|
||||
|
|
|
|||
|
|
@ -1,5 +1,11 @@
|
|||
# CodeQL Action and CodeQL Runner Changelog
|
||||
|
||||
## 1.0.32 - 07 Feb 2022
|
||||
|
||||
- Add `sarif-id` as an output for the `upload-sarif` and `analyze` actions. [#889](https://github.com/github/codeql-action/pull/889)
|
||||
- Add `ref` and `sha` inputs to the `analyze` action, which override the defaults provided by the GitHub Action context. [#889](https://github.com/github/codeql-action/pull/889)
|
||||
- Update default CodeQL bundle version to 2.8.0. [#911](https://github.com/github/codeql-action/pull/911)
|
||||
|
||||
## 1.0.31 - 31 Jan 2022
|
||||
|
||||
- Remove `experimental` message when using custom CodeQL packages. [#888](https://github.com/github/codeql-action/pull/888)
|
||||
|
|
|
|||
|
|
@ -45,6 +45,12 @@ inputs:
|
|||
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
||||
required: false
|
||||
default: ${{ github.workspace }}
|
||||
ref:
|
||||
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
|
||||
required: false
|
||||
sha:
|
||||
description: "The sha of the HEAD of the ref where results will be uploaded. If not provided, the Action will use the GITHUB_SHA environment variable. If provided, the ref input must be provided as well. This input is not available in pull requests from forks."
|
||||
required: false
|
||||
category:
|
||||
description: String used by Code Scanning for matching the analyses
|
||||
required: false
|
||||
|
|
@ -63,6 +69,8 @@ inputs:
|
|||
outputs:
|
||||
db-locations:
|
||||
description: A map from language to absolute path for each database created by CodeQL.
|
||||
sarif-id:
|
||||
description: The ID of the uploaded SARIF file.
|
||||
runs:
|
||||
using: "node12"
|
||||
main: "../lib/analyze-action.js"
|
||||
|
|
|
|||
86
lib/actions-util.js
generated
86
lib/actions-util.js
generated
|
|
@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
|
|
@ -51,10 +51,10 @@ exports.getRequiredInput = getRequiredInput;
|
|||
* This allows us to get stronger type checking of required/optional inputs
|
||||
* and make behaviour more consistent between actions and the runner.
|
||||
*/
|
||||
function getOptionalInput(name) {
|
||||
const getOptionalInput = function (name) {
|
||||
const value = core.getInput(name);
|
||||
return value.length > 0 ? value : undefined;
|
||||
}
|
||||
};
|
||||
exports.getOptionalInput = getOptionalInput;
|
||||
function getTemporaryDirectory() {
|
||||
const value = process.env["CODEQL_ACTION_TEMP"];
|
||||
|
|
@ -97,12 +97,61 @@ const getCommitOid = async function (ref = "HEAD") {
|
|||
return commitOid.trim();
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
|
||||
core.info(`Failed to call git to get current commit. Continuing with data from environment or input: ${e}`);
|
||||
core.info(e.stack || "NO STACK");
|
||||
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
}
|
||||
};
|
||||
exports.getCommitOid = getCommitOid;
|
||||
/**
|
||||
* If the action was triggered by a pull request, determine the commit sha of the merge base.
|
||||
* Returns undefined if run by other triggers or the merge base cannot be determined.
|
||||
*/
|
||||
const determineMergeBaseCommitOid = async function () {
|
||||
if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
|
||||
return undefined;
|
||||
}
|
||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
let headOid = "";
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdline: (data) => {
|
||||
if (data.startsWith("commit ") && commitOid === "") {
|
||||
commitOid = data.substring(7);
|
||||
}
|
||||
else if (data.startsWith("parent ")) {
|
||||
if (baseOid === "") {
|
||||
baseOid = data.substring(7);
|
||||
}
|
||||
else if (headOid === "") {
|
||||
headOid = data.substring(7);
|
||||
}
|
||||
}
|
||||
},
|
||||
stderr: (data) => {
|
||||
process.stderr.write(data);
|
||||
},
|
||||
},
|
||||
}).exec();
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (commitOid === mergeSha &&
|
||||
headOid.length === 40 &&
|
||||
baseOid.length === 40) {
|
||||
return baseOid;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
|
||||
core.info(e.stack || "NO STACK");
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||
function isObject(o) {
|
||||
return o !== null && typeof o === "object";
|
||||
}
|
||||
|
|
@ -373,8 +422,21 @@ exports.computeAutomationID = computeAutomationID;
|
|||
async function getRef() {
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const ref = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||
const sha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
const refInput = (0, exports.getOptionalInput)("ref");
|
||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' are provided, both are required
|
||||
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
||||
throw new Error("Both 'ref' and 'sha' are required if one of them is provided.");
|
||||
}
|
||||
const ref = refInput || (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||
// If the ref is a user-provided input, we have to skip logic
|
||||
// and assume that it is really where they want to upload the results.
|
||||
if (refInput) {
|
||||
return refInput;
|
||||
}
|
||||
// For pull request refs we want to detect whether the workflow
|
||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||
// than the 'merge' ref. If so, we want to convert the ref that
|
||||
|
|
@ -412,7 +474,7 @@ exports.getRef = getRef;
|
|||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
||||
const commitOid = process.env["GITHUB_SHA"] || "";
|
||||
const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
||||
const ref = await getRef();
|
||||
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
|
||||
let workflowRunID = -1;
|
||||
|
|
@ -464,7 +526,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
|||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||
const GENERIC_404_MSG = "Not authorized to used the CodeQL code scanning feature on this repo.";
|
||||
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
|
||||
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||
/**
|
||||
|
|
@ -479,6 +541,12 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
|
|||
async function sendStatusReport(statusReport) {
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
||||
if (testMode) {
|
||||
core.debug("In test mode. Status reports are not uploaded.");
|
||||
return true;
|
||||
}
|
||||
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
|
||||
const [owner, repo] = nwo.split("/");
|
||||
const client = api.getActionsApiClient();
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
37
lib/actions-util.test.js
generated
37
lib/actions-util.test.js
generated
|
|
@ -71,6 +71,43 @@ function errorCodes(actual, expected) {
|
|||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
// These values are be ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
const actualRef = await actionsutil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
await t.throwsAsync(async () => {
|
||||
await actionsutil.getRef();
|
||||
}, {
|
||||
instanceOf: Error,
|
||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
});
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
1
lib/analyze-action.js
generated
1
lib/analyze-action.js
generated
|
|
@ -109,6 +109,7 @@ async function run() {
|
|||
core.setOutput("db-locations", dbLocations);
|
||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
}
|
||||
else {
|
||||
logger.info("Not uploading results");
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
13
lib/config-utils.js
generated
13
lib/config-utils.js
generated
|
|
@ -120,24 +120,23 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
|||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
var _a;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
}
|
||||
// If we're running the JavaScript security-extended analysis (or a superset of it) and the repo
|
||||
// is opted into the ML-powered queries beta, then add the ML-powered query pack so that we run
|
||||
// the ML-powered queries.
|
||||
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||
if (languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push({
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
});
|
||||
packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
|
||||
}
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
123
lib/config-utils.test.js
generated
123
lib/config-utils.test.js
generated
|
|
@ -117,7 +117,10 @@ function mockListLanguages(languages) {
|
|||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
const config2 = await configUtils.getConfig(tmpDir, logger);
|
||||
t.deepEqual(config1, config2);
|
||||
t.not(config2, undefined);
|
||||
if (config2 !== undefined) {
|
||||
t.deepEqual(config1, config2);
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
|
|
@ -760,28 +763,26 @@ const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
|
|||
/**
|
||||
* Test macro for ensuring the packs block is valid
|
||||
*/
|
||||
function parsePacksMacro(t, packsByLanguage, languages, expected) {
|
||||
t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected);
|
||||
}
|
||||
parsePacksMacro.title = (providedTitle) => `Parse Packs: ${providedTitle}`;
|
||||
const parsePacksMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.deepEqual(configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), expected),
|
||||
title: (providedTitle = "") => `Parse Packs: ${providedTitle}`,
|
||||
});
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
function parsePacksErrorMacro(t, packsByLanguage, languages, expected) {
|
||||
t.throws(() => {
|
||||
configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b");
|
||||
}, {
|
||||
const parsePacksErrorMacro = ava_1.default.macro({
|
||||
exec: (t, packsByLanguage, languages, expected) => t.throws(() => configUtils.parsePacksFromConfig(packsByLanguage, languages, "/a/b"), {
|
||||
message: expected,
|
||||
});
|
||||
}
|
||||
parsePacksErrorMacro.title = (providedTitle) => `Parse Packs Error: ${providedTitle}`;
|
||||
}),
|
||||
title: (providedTitle = "") => `Parse Packs Error: ${providedTitle}`,
|
||||
});
|
||||
/**
|
||||
* Test macro for testing when the packs block is invalid
|
||||
*/
|
||||
function invalidPackNameMacro(t, name) {
|
||||
parsePacksErrorMacro(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`));
|
||||
}
|
||||
invalidPackNameMacro.title = (_, arg) => `Invalid pack string: ${arg}`;
|
||||
const invalidPackNameMacro = ava_1.default.macro({
|
||||
exec: (t, name) => parsePacksErrorMacro.exec(t, { [languages_1.Language.cpp]: [name] }, [languages_1.Language.cpp], new RegExp(`The configuration file "/a/b" is invalid: property "packs" "${name}" is not a valid pack`)),
|
||||
title: (_providedTitle, arg) => `Invalid pack string: ${arg}`,
|
||||
});
|
||||
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
|
||||
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
|
||||
[languages_1.Language.cpp]: [
|
||||
|
|
@ -870,50 +871,50 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
|
|||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
|
||||
async function mlPoweredQueriesMacro(t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
return codeQLVersion;
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: { "fake-query.ql": {} },
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (shouldRunMlPoweredQueries) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
},
|
||||
],
|
||||
const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
return codeQLVersion;
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: { "fake-query.ql": {} },
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
}
|
||||
else {
|
||||
t.deepEqual(packs, {});
|
||||
}
|
||||
});
|
||||
}
|
||||
mlPoweredQueriesMacro.title = (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
|
||||
const queriesInputDescription = queriesInput
|
||||
? `'queries: ${queriesInput}'`
|
||||
: "default config";
|
||||
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
|
||||
};
|
||||
// macro, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-extended", true);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-and-quality", true);
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectedVersionString !== undefined) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: expectedVersionString,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
else {
|
||||
t.deepEqual(packs, {});
|
||||
}
|
||||
});
|
||||
},
|
||||
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
|
||||
? `${expectedVersionString} are`
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
||||
});
|
||||
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.0.2");
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.0.2");
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1,3 +1,3 @@
|
|||
{
|
||||
"bundleVersion": "codeql-bundle-20220120"
|
||||
"bundleVersion": "codeql-bundle-20220128"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAA
I;YACxB,0BAA0B;YAC1B,4BAA4B;SAC7B,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,EACrE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;CAC7B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAG,EAAE,CAAC;YAChC,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAG;gBACzB,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
|
||||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAA
I;YACxB,0BAA0B;YAC1B,4BAA4B;SAC7B,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,EACrE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;CAC7B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
|
||||
5
lib/init-action.js
generated
5
lib/init-action.js
generated
|
|
@ -54,14 +54,15 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
|||
}
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
disable_default_queries: disableDefaultQueries,
|
||||
languages,
|
||||
workflow_languages: workflowLanguages || "",
|
||||
ml_powered_js_queries: (0, util_1.getMlPoweredJsQueriesStatus)(config),
|
||||
paths,
|
||||
paths_ignore: pathsIgnore,
|
||||
disable_default_queries: disableDefaultQueries,
|
||||
queries: queries.join(","),
|
||||
tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
|
||||
tools_resolved_version: toolsVersion,
|
||||
workflow_languages: workflowLanguages || "",
|
||||
};
|
||||
await (0, actions_util_1.sendStatusReport)(statusReport);
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,o
CAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,o
CAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
|
||||
5
lib/tracer-config.test.js
generated
5
lib/tracer-config.test.js
generated
|
|
@ -162,7 +162,10 @@ function getTestConfig(tmpDir) {
|
|||
javascript: { spec, env: { a: "a", b: "b" } },
|
||||
python: { spec, env: { b: "c" } },
|
||||
}, config));
|
||||
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
|
||||
// If e is undefined, then the previous assertion will fail.
|
||||
if (e !== undefined) {
|
||||
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
27
lib/upload-lib.js
generated
27
lib/upload-lib.js
generated
|
|
@ -95,6 +95,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
|
|||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
||||
if (testMode) {
|
||||
logger.debug("In test mode. Results are not uploaded.");
|
||||
return;
|
||||
}
|
||||
const client = api.getApiClient(apiDetails);
|
||||
|
|
@ -203,7 +204,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
|
|||
exports.validateSarifFileSchema = validateSarifFileSchema;
|
||||
// buildPayload constructs a map ready to be uploaded to the API from the given
|
||||
// parameters, respecting the current mode and target GitHub instance version.
|
||||
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion) {
|
||||
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mergeBaseCommitOid) {
|
||||
if (util.isActions()) {
|
||||
const payloadObj = {
|
||||
commit_oid: commitOid,
|
||||
|
|
@ -222,11 +223,23 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
|
|||
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
|
||||
if (gitHubVersion.type !== util.GitHubVariant.GHES ||
|
||||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
|
||||
if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
|
||||
process.env.GITHUB_EVENT_PATH) {
|
||||
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
|
||||
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
|
||||
payloadObj.base_sha = githubEvent.pull_request.base.sha;
|
||||
if (process.env.GITHUB_EVENT_NAME === "pull_request") {
|
||||
if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
|
||||
mergeBaseCommitOid) {
|
||||
// We're uploading results for the merge commit
|
||||
// and were able to determine the merge base.
|
||||
// So we use that as the most accurate base.
|
||||
payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
|
||||
payloadObj.base_sha = mergeBaseCommitOid;
|
||||
}
|
||||
else if (process.env.GITHUB_EVENT_PATH) {
|
||||
// Either we're not uploading results for the merge commit
|
||||
// or we could not determine the merge base.
|
||||
// Using the PR base is the only option here
|
||||
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
|
||||
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
|
||||
payloadObj.base_sha = githubEvent.pull_request.base.sha;
|
||||
}
|
||||
}
|
||||
}
|
||||
return payloadObj;
|
||||
|
|
@ -259,7 +272,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
|||
const sarifPayload = JSON.stringify(sarif);
|
||||
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
|
||||
const checkoutURI = (0, file_url_1.default)(sourceRoot);
|
||||
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
|
||||
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, await actionsUtil.determineMergeBaseCommitOid());
|
||||
// Log some useful debug info about the info
|
||||
const rawUploadSizeBytes = sarifPayload.length;
|
||||
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
15
lib/upload-lib.test.js
generated
15
lib/upload-lib.test.js
generated
|
|
@ -53,20 +53,29 @@ ava_1.default.beforeEach(() => {
|
|||
const allVersions = newVersions.concat(oldVersions);
|
||||
process.env["GITHUB_EVENT_NAME"] = "push";
|
||||
for (const version of allVersions) {
|
||||
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
|
||||
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
|
||||
// Not triggered by a pull request
|
||||
t.falsy(payload.base_ref);
|
||||
t.falsy(payload.base_sha);
|
||||
}
|
||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
||||
process.env["GITHUB_SHA"] = "commit";
|
||||
process.env["GITHUB_BASE_REF"] = "master";
|
||||
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
|
||||
for (const version of newVersions) {
|
||||
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
|
||||
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
|
||||
// Uploads for a merge commit use the merge base
|
||||
t.deepEqual(payload.base_ref, "refs/heads/master");
|
||||
t.deepEqual(payload.base_sha, "mergeBaseCommit");
|
||||
}
|
||||
for (const version of newVersions) {
|
||||
const payload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
|
||||
// Uploads for the head use the PR base
|
||||
t.deepEqual(payload.base_ref, "refs/heads/master");
|
||||
t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
|
||||
}
|
||||
for (const version of oldVersions) {
|
||||
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
|
||||
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
|
||||
// These older versions won't expect these values
|
||||
t.falsy(payload.base_ref);
|
||||
t.falsy(payload.base_sha);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
1
lib/upload-sarif-action.js
generated
1
lib/upload-sarif-action.js
generated
|
|
@ -48,6 +48,7 @@ async function run() {
|
|||
};
|
||||
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
|
||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CA
AC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAA
O,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
55
lib/util.js
generated
55
lib/util.js
generated
|
|
@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
|
|
@ -524,4 +524,57 @@ function checkNotWindows11() {
|
|||
}
|
||||
}
|
||||
exports.checkNotWindows11 = checkNotWindows11;
|
||||
/**
|
||||
* The ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
||||
* queries beta.
|
||||
*/
|
||||
exports.ML_POWERED_JS_QUERIES_PACK = {
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
};
|
||||
/**
|
||||
* Get information about ML-powered JS queries to populate status reports with.
|
||||
*
|
||||
* This will be:
|
||||
*
|
||||
* - The version string if the analysis is using the ML-powered query pack that will be added to the
|
||||
* analysis if the repo is opted into the ML-powered queries beta, i.e.
|
||||
* {@link ML_POWERED_JS_QUERIES_PACK.version}. If the version string
|
||||
* {@link ML_POWERED_JS_QUERIES_PACK.version} is undefined, then the status report string will be
|
||||
* "latest", however this shouldn't occur in practice (see comment below).
|
||||
* - "false" if the analysis won't run any ML-powered JS queries.
|
||||
* - "other" in all other cases.
|
||||
*
|
||||
* Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
|
||||
* errors with ML-powered queries turned on and off. We also want to be able to compare minor
|
||||
* version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
|
||||
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
|
||||
* version strings.
|
||||
*
|
||||
* We restrict the set of strings we report here by excluding other version strings and combinations
|
||||
* of version strings. We do this to limit the cardinality of the ML-powered JS queries status
|
||||
* report field, since some platforms that ingest this status report bill based on the cardinality
|
||||
* of its fields.
|
||||
*
|
||||
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
|
||||
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
|
||||
* explanation as to why this is.
|
||||
*/
|
||||
function getMlPoweredJsQueriesStatus(config) {
|
||||
const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK.packName);
|
||||
if (mlPoweredJsQueryPacks.length === 0) {
|
||||
return "false";
|
||||
}
|
||||
const firstVersionString = mlPoweredJsQueryPacks[0].version;
|
||||
if (mlPoweredJsQueryPacks.length === 1 &&
|
||||
exports.ML_POWERED_JS_QUERIES_PACK.version === firstVersionString) {
|
||||
// We should always specify an explicit version string in `ML_POWERED_JS_QUERIES_PACK`,
|
||||
// otherwise we won't be able to make changes to the pack unless those changes are compatible
|
||||
// with each version of the CodeQL Action. Therefore in practice, we should never hit the
|
||||
// `latest` case here.
|
||||
return exports.ML_POWERED_JS_QUERIES_PACK.version || "latest";
|
||||
}
|
||||
return "other";
|
||||
}
|
||||
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
|
||||
//# sourceMappingURL=util.js.map
|
||||
File diff suppressed because one or more lines are too long
58
lib/util.test.js
generated
58
lib/util.test.js
generated
|
|
@ -204,4 +204,62 @@ async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
|
|||
const stdin = stream.Readable.from(text);
|
||||
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
|
||||
}
|
||||
const ML_POWERED_JS_STATUS_TESTS = [
|
||||
[[], "false"],
|
||||
[[{ packName: "someOtherPack" }], "false"],
|
||||
[
|
||||
[{ packName: "someOtherPack" }, util.ML_POWERED_JS_QUERIES_PACK],
|
||||
util.ML_POWERED_JS_QUERIES_PACK.version,
|
||||
],
|
||||
[[util.ML_POWERED_JS_QUERIES_PACK], util.ML_POWERED_JS_QUERIES_PACK.version],
|
||||
[[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName }], "other"],
|
||||
[
|
||||
[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }],
|
||||
"other",
|
||||
],
|
||||
[
|
||||
[
|
||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.1" },
|
||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.2" },
|
||||
],
|
||||
"other",
|
||||
],
|
||||
[
|
||||
[
|
||||
{ packName: "someOtherPack" },
|
||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName },
|
||||
],
|
||||
"other",
|
||||
],
|
||||
];
|
||||
for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
|
||||
const packDescriptions = `[${packs
|
||||
.map((pack) => JSON.stringify(pack))
|
||||
.join(", ")}]`;
|
||||
(0, ava_1.default)(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
|
||||
return util.withTmpDir(async (tmpDir) => {
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
paths: [],
|
||||
pathsIgnore: [],
|
||||
originalUserInput: {},
|
||||
tempDir: tmpDir,
|
||||
toolCacheDir: tmpDir,
|
||||
codeQLCmd: "",
|
||||
gitHubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
dbLocation: "",
|
||||
packs: {
|
||||
javascript: packs,
|
||||
},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
|
||||
});
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=util.test.js.map
|
||||
File diff suppressed because one or more lines are too long
2
node_modules/.bin/ava
generated
vendored
2
node_modules/.bin/ava
generated
vendored
|
|
@ -1 +1 @@
|
|||
../ava/cli.js
|
||||
../ava/entrypoints/cli.mjs
|
||||
1
node_modules/.bin/import-local-fixture
generated
vendored
1
node_modules/.bin/import-local-fixture
generated
vendored
|
|
@ -1 +0,0 @@
|
|||
../import-local/fixtures/cli.js
|
||||
1
node_modules/.bin/is-ci
generated
vendored
1
node_modules/.bin/is-ci
generated
vendored
|
|
@ -1 +0,0 @@
|
|||
../is-ci/bin.js
|
||||
1
node_modules/.bin/rc
generated
vendored
1
node_modules/.bin/rc
generated
vendored
|
|
@ -1 +0,0 @@
|
|||
../rc/cli.js
|
||||
2039
node_modules/.package-lock.json
generated
vendored
2039
node_modules/.package-lock.json
generated
vendored
File diff suppressed because it is too large
Load diff
|
|
@ -1,6 +1,6 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
Copyright (c) Mark Wubben (https://novemberborn.net)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
16
node_modules/@ava/typescript/README.md
generated
vendored
16
node_modules/@ava/typescript/README.md
generated
vendored
|
|
@ -1,11 +1,21 @@
|
|||
# @ava/typescript
|
||||
|
||||
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).
|
||||
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA 4](https://avajs.dev).
|
||||
|
||||
This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
|
||||
|
||||
In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
|
||||
|
||||
## For AVA 3 users
|
||||
|
||||
Use version 2:
|
||||
|
||||
```console
|
||||
npm install --save-dev @ava/typescript@2
|
||||
```
|
||||
|
||||
Note that v2 does not support ES modules. This requires v3 and AVA 4.
|
||||
|
||||
## Enabling TypeScript support
|
||||
|
||||
Add this package to your project:
|
||||
|
|
@ -39,6 +49,10 @@ Output files are expected to have the `.js` extension.
|
|||
|
||||
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
|
||||
|
||||
## ES Modules
|
||||
|
||||
If your `package.json` has configured `"type": "module"`, or you've configured AVA to treat the `js` extension as `module`, then `@ava/typescript` will import the output file as an ES module. Note that this is based on the *output file*, not the `ts` extension.
|
||||
|
||||
## Add additional extensions
|
||||
|
||||
You can configure AVA to recognize additional file extensions. To add (partial†) JSX support:
|
||||
|
|
|
|||
66
node_modules/@ava/typescript/index.js
generated
vendored
66
node_modules/@ava/typescript/index.js
generated
vendored
|
|
@ -1,9 +1,10 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const escapeStringRegexp = require('escape-string-regexp');
|
||||
const execa = require('execa');
|
||||
const pkg = require('./package.json');
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import {pathToFileURL} from 'node:url';
|
||||
import escapeStringRegexp from 'escape-string-regexp';
|
||||
import execa from 'execa';
|
||||
|
||||
const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
|
||||
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
|
||||
|
||||
function isPlainObject(x) {
|
||||
|
|
@ -44,7 +45,7 @@ const configProperties = {
|
|||
required: true,
|
||||
isValid(compile) {
|
||||
return compile === false || compile === 'tsc';
|
||||
}
|
||||
},
|
||||
},
|
||||
rewritePaths: {
|
||||
required: true,
|
||||
|
|
@ -53,23 +54,21 @@ const configProperties = {
|
|||
return false;
|
||||
}
|
||||
|
||||
return Object.entries(rewritePaths).every(([from, to]) => {
|
||||
return from.endsWith('/') && typeof to === 'string' && to.endsWith('/');
|
||||
});
|
||||
}
|
||||
return Object.entries(rewritePaths).every(([from, to]) => from.endsWith('/') && typeof to === 'string' && to.endsWith('/'));
|
||||
},
|
||||
},
|
||||
extensions: {
|
||||
required: false,
|
||||
isValid(extensions) {
|
||||
return Array.isArray(extensions) &&
|
||||
extensions.length > 0 &&
|
||||
extensions.every(ext => typeof ext === 'string' && ext !== '') &&
|
||||
new Set(extensions).size === extensions.length;
|
||||
}
|
||||
}
|
||||
return Array.isArray(extensions)
|
||||
&& extensions.length > 0
|
||||
&& extensions.every(ext => typeof ext === 'string' && ext !== '')
|
||||
&& new Set(extensions).size === extensions.length;
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = ({negotiateProtocol}) => {
|
||||
export default function typescriptProvider({negotiateProtocol}) {
|
||||
const protocol = negotiateProtocol(['ava-3.2'], {version: pkg.version});
|
||||
if (protocol === null) {
|
||||
return;
|
||||
|
|
@ -86,12 +85,12 @@ module.exports = ({negotiateProtocol}) => {
|
|||
const {
|
||||
extensions = ['ts'],
|
||||
rewritePaths: relativeRewritePaths,
|
||||
compile
|
||||
compile,
|
||||
} = config;
|
||||
|
||||
const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [
|
||||
path.join(protocol.projectDir, from),
|
||||
path.join(protocol.projectDir, to)
|
||||
path.join(protocol.projectDir, to),
|
||||
]);
|
||||
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
|
||||
|
||||
|
|
@ -102,13 +101,13 @@ module.exports = ({negotiateProtocol}) => {
|
|||
}
|
||||
|
||||
return {
|
||||
extensions: extensions.slice(),
|
||||
rewritePaths: rewritePaths.slice()
|
||||
extensions: [...extensions],
|
||||
rewritePaths: [...rewritePaths],
|
||||
};
|
||||
},
|
||||
|
||||
get extensions() {
|
||||
return extensions.slice();
|
||||
return [...extensions];
|
||||
},
|
||||
|
||||
ignoreChange(filePath) {
|
||||
|
|
@ -139,18 +138,19 @@ module.exports = ({negotiateProtocol}) => {
|
|||
filePatterns: [
|
||||
...filePatterns,
|
||||
'!**/*.d.ts',
|
||||
...Object.values(relativeRewritePaths).map(to => `!${to}**`)
|
||||
...Object.values(relativeRewritePaths).map(to => `!${to}**`),
|
||||
],
|
||||
ignoredByWatcherPatterns: [
|
||||
...ignoredByWatcherPatterns,
|
||||
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`)
|
||||
]
|
||||
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`),
|
||||
],
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
|
||||
const useImport = extensionsToLoadAsModules.includes('js');
|
||||
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
|
||||
|
||||
return {
|
||||
|
|
@ -159,18 +159,12 @@ module.exports = ({negotiateProtocol}) => {
|
|||
},
|
||||
|
||||
async load(ref, {requireFn}) {
|
||||
for (const extension of extensionsToLoadAsModules) {
|
||||
if (ref.endsWith(`.${extension}`)) {
|
||||
throw new Error('@ava/typescript cannot yet load ESM files');
|
||||
}
|
||||
}
|
||||
|
||||
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
|
||||
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
|
||||
const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
|
||||
return requireFn(rewritten);
|
||||
}
|
||||
return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
|
|
|||
6
node_modules/@ava/typescript/node_modules/escape-string-regexp/index.d.ts
generated
vendored
6
node_modules/@ava/typescript/node_modules/escape-string-regexp/index.d.ts
generated
vendored
|
|
@ -5,7 +5,7 @@ You can also use this to escape a string that is inserted into the middle of a r
|
|||
|
||||
@example
|
||||
```
|
||||
import escapeStringRegexp = require('escape-string-regexp');
|
||||
import escapeStringRegexp from 'escape-string-regexp';
|
||||
|
||||
const escapedString = escapeStringRegexp('How much $ for a 🦄?');
|
||||
//=> 'How much \\$ for a 🦄\\?'
|
||||
|
|
@ -13,6 +13,4 @@ const escapedString = escapeStringRegexp('How much $ for a 🦄?');
|
|||
new RegExp(escapedString);
|
||||
```
|
||||
*/
|
||||
declare const escapeStringRegexp: (string: string) => string;
|
||||
|
||||
export = escapeStringRegexp;
|
||||
export default function escapeStringRegexp(string: string): string;
|
||||
|
|
|
|||
8
node_modules/@ava/typescript/node_modules/escape-string-regexp/index.js
generated
vendored
8
node_modules/@ava/typescript/node_modules/escape-string-regexp/index.js
generated
vendored
|
|
@ -1,13 +1,11 @@
|
|||
'use strict';
|
||||
|
||||
module.exports = string => {
|
||||
export default function escapeStringRegexp(string) {
|
||||
if (typeof string !== 'string') {
|
||||
throw new TypeError('Expected a string');
|
||||
}
|
||||
|
||||
// Escape characters with special meaning either inside or outside character sets.
|
||||
// Use a simple backslash escape when it’s always valid, and a \unnnn escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
|
||||
// Use a simple backslash escape when it’s always valid, and a `\xnn` escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
|
||||
return string
|
||||
.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
|
||||
.replace(/-/g, '\\x2d');
|
||||
};
|
||||
}
|
||||
|
|
|
|||
12
node_modules/@ava/typescript/node_modules/escape-string-regexp/package.json
generated
vendored
12
node_modules/@ava/typescript/node_modules/escape-string-regexp/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "escape-string-regexp",
|
||||
"version": "4.0.0",
|
||||
"version": "5.0.0",
|
||||
"description": "Escape RegExp special characters",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/escape-string-regexp",
|
||||
|
|
@ -10,8 +10,10 @@
|
|||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"type": "module",
|
||||
"exports": "./index.js",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
"node": ">=12"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
|
|
@ -31,8 +33,8 @@
|
|||
"characters"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^1.4.1",
|
||||
"tsd": "^0.11.0",
|
||||
"xo": "^0.28.3"
|
||||
"ava": "^3.15.0",
|
||||
"tsd": "^0.14.0",
|
||||
"xo": "^0.38.2"
|
||||
}
|
||||
}
|
||||
4
node_modules/@ava/typescript/node_modules/escape-string-regexp/readme.md
generated
vendored
4
node_modules/@ava/typescript/node_modules/escape-string-regexp/readme.md
generated
vendored
|
|
@ -1,4 +1,4 @@
|
|||
# escape-string-regexp [](https://travis-ci.org/sindresorhus/escape-string-regexp)
|
||||
# escape-string-regexp
|
||||
|
||||
> Escape RegExp special characters
|
||||
|
||||
|
|
@ -11,7 +11,7 @@ $ npm install escape-string-regexp
|
|||
## Usage
|
||||
|
||||
```js
|
||||
const escapeStringRegexp = require('escape-string-regexp');
|
||||
import escapeStringRegexp from 'escape-string-regexp';
|
||||
|
||||
const escapedString = escapeStringRegexp('How much $ for a 🦄?');
|
||||
//=> 'How much \\$ for a 🦄\\?'
|
||||
|
|
|
|||
29
node_modules/@ava/typescript/package.json
generated
vendored
29
node_modules/@ava/typescript/package.json
generated
vendored
|
|
@ -1,13 +1,17 @@
|
|||
{
|
||||
"name": "@ava/typescript",
|
||||
"version": "2.0.0",
|
||||
"version": "3.0.1",
|
||||
"description": "TypeScript provider for AVA",
|
||||
"engines": {
|
||||
"node": ">=12.22 <13 || >=14.16 <15 || >=15"
|
||||
"node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"exports": {
|
||||
".": "./index.js"
|
||||
},
|
||||
"type": "module",
|
||||
"author": "Mark Wubben (https://novemberborn.net)",
|
||||
"repository": "avajs/typescript",
|
||||
"license": "MIT",
|
||||
|
|
@ -19,15 +23,15 @@
|
|||
"test": "xo && c8 ava"
|
||||
},
|
||||
"dependencies": {
|
||||
"escape-string-regexp": "^4.0.0",
|
||||
"execa": "^5.0.0"
|
||||
"escape-string-regexp": "^5.0.0",
|
||||
"execa": "^5.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ava": "^3.15.0",
|
||||
"c8": "^7.7.1",
|
||||
"ava": "4.0.0-rc.1",
|
||||
"c8": "^7.10.0",
|
||||
"del": "^6.0.0",
|
||||
"typescript": "^4.2.4",
|
||||
"xo": "^0.38.2"
|
||||
"typescript": "^4.4.4",
|
||||
"xo": "^0.46.3"
|
||||
},
|
||||
"c8": {
|
||||
"reporter": [
|
||||
|
|
@ -40,14 +44,15 @@
|
|||
"files": [
|
||||
"!test/broken-fixtures/**"
|
||||
],
|
||||
"ignoredByWatcher": [
|
||||
"test/fixtures/**",
|
||||
"test/broken-fixtures/**"
|
||||
],
|
||||
"timeout": "60s"
|
||||
},
|
||||
"xo": {
|
||||
"ignores": [
|
||||
"test/broken-fixtures"
|
||||
],
|
||||
"rules": {
|
||||
"import/order": "off"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
14
node_modules/@concordance/react/LICENSE
generated
vendored
14
node_modules/@concordance/react/LICENSE
generated
vendored
|
|
@ -1,14 +0,0 @@
|
|||
ISC License (ISC)
|
||||
Copyright (c) 2017, Mark Wubben <mark@novemberborn.net> (novemberborn.net)
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any purpose
|
||||
with or without fee is hereby granted, provided that the above copyright notice
|
||||
and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
|
||||
THIS SOFTWARE.
|
||||
18
node_modules/@concordance/react/README.md
generated
vendored
18
node_modules/@concordance/react/README.md
generated
vendored
|
|
@ -1,18 +0,0 @@
|
|||
# @concordance/react
|
||||
|
||||
React plugin for [Concordance](https://github.com/concordancejs/concordance).
|
||||
|
||||
Allows
|
||||
[`React.createElement()`](https://facebook.github.io/react/docs/react-api.html#createelement)
|
||||
objects to be compared, formatted, diffed and serialized. Also supports
|
||||
`toJSON()` renderings of
|
||||
[`react-test-renderer`](https://www.npmjs.com/package/react-test-renderer).
|
||||
These may be compared to `React.createElement()` objects.
|
||||
|
||||
When comparing [React
|
||||
component](https://facebook.github.io/react/docs/components-and-props.html)
|
||||
elements, the element type is compared by identity. After deserialization the
|
||||
element types are compared by function name.
|
||||
|
||||
Component elements are formatted with a ⍟ character after the element
|
||||
name. Properties and children are formatted by [Concordance](https://github.com/concordancejs/concordance).
|
||||
75
node_modules/@concordance/react/index.js
generated
vendored
75
node_modules/@concordance/react/index.js
generated
vendored
|
|
@ -1,75 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const pkg = require('./package.json')
|
||||
const elementFactory = require('./lib/elementFactory')
|
||||
const testJsonFactory = require('./lib/testJsonFactory')
|
||||
|
||||
// Must be unique across all registered plugins.
|
||||
exports.name = pkg.name
|
||||
|
||||
// Expected API version to be passed to register().
|
||||
exports.apiVersion = 1
|
||||
|
||||
// Expected minimal version of Concordance. Concordance will increment its API
|
||||
// version for breaking changes, this is useful if you rely on features or
|
||||
// patches that were introduced in a specific version of Concordance.
|
||||
exports.minimalConcordanceVersion = '1.0.0'
|
||||
|
||||
// Plugin-specific version of its serialization output.
|
||||
exports.serializerVersion = 2
|
||||
|
||||
exports.theme = {
|
||||
react: {
|
||||
functionType: '\u235F',
|
||||
openTag: {
|
||||
start: '<',
|
||||
end: '>',
|
||||
selfClose: '/',
|
||||
selfCloseVoid: ' /'
|
||||
},
|
||||
closeTag: {
|
||||
open: '</',
|
||||
close: '>'
|
||||
},
|
||||
tagName: {open: '', close: ''},
|
||||
attribute: {
|
||||
separator: '=',
|
||||
value: {
|
||||
openBracket: '{',
|
||||
closeBracket: '}',
|
||||
string: {
|
||||
line: {open: '"', close: '"', escapeQuote: '"'}
|
||||
}
|
||||
}
|
||||
},
|
||||
child: {
|
||||
openBracket: '{',
|
||||
closeBracket: '}',
|
||||
string: {
|
||||
line: {open: '', close: '', escapeQuote: ''},
|
||||
multiline: {start: '', end: '', escapeQuote: ''}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const ELEMENT = Symbol.for('react.element')
|
||||
const TEST_JSON = Symbol.for('react.test.json')
|
||||
|
||||
function register (api) {
|
||||
const reactTags = new Set()
|
||||
const element = elementFactory(api, reactTags)
|
||||
const testJson = testJsonFactory(api, element)
|
||||
|
||||
api.addDescriptor(0x01, element.tag, element.deserialize)
|
||||
api.addDescriptor(0x02, testJson.tag, testJson.deserialize)
|
||||
|
||||
reactTags.add(element.tag).add(testJson.tag)
|
||||
|
||||
return value => {
|
||||
if (value.$$typeof === ELEMENT) return element.describe
|
||||
if (value.$$typeof === TEST_JSON) return testJson.describe
|
||||
return null
|
||||
}
|
||||
}
|
||||
exports.register = register
|
||||
239
node_modules/@concordance/react/lib/diffShallow.js
generated
vendored
239
node_modules/@concordance/react/lib/diffShallow.js
generated
vendored
|
|
@ -1,239 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
function diffShallow (api, actual, expected, theme, indent) {
|
||||
const childBuffer = api.lineBuilder.buffer()
|
||||
const propertyBuffer = api.lineBuilder.buffer()
|
||||
|
||||
return {
|
||||
append (formatted, origin) {
|
||||
if (origin.isItem === true) {
|
||||
childBuffer.append(formatted)
|
||||
} else {
|
||||
propertyBuffer.append(formatted)
|
||||
}
|
||||
},
|
||||
|
||||
finalize: () => {
|
||||
const namesAreEqual = actual.compareNames(expected)
|
||||
const actualName = actual.formatName(theme)
|
||||
const expectedName = expected.formatName(theme)
|
||||
|
||||
const openTag = theme.react.openTag
|
||||
const innerIndentation = indent.increase()
|
||||
|
||||
const allChildren = childBuffer.withFirstPrefixed(innerIndentation)
|
||||
const children = allChildren.decompose()
|
||||
|
||||
const allProperties = propertyBuffer.withFirstPrefixed(innerIndentation)
|
||||
const properties = allProperties.decompose()
|
||||
// If the first properties are also the last, and either side has no
|
||||
// children, ensure the properties are treated as being last. This
|
||||
// leads to a better balanced diff.
|
||||
if (properties.remaining.isEmpty && (!actual.hasChildren || !expected.hasChildren)) {
|
||||
properties.last = properties.first
|
||||
properties.first = {actual: api.lineBuilder.buffer(), expected: api.lineBuilder.buffer()}
|
||||
}
|
||||
|
||||
const result = api.lineBuilder.buffer()
|
||||
|
||||
// Create a custom diff that is as neat as possible. It's likely
|
||||
// there's a generic algorithm that can be used, but for expediency's
|
||||
// sake handles all possible diffs by brute force instead.
|
||||
if (actual.hasProperties && expected.hasProperties) {
|
||||
if (namesAreEqual) {
|
||||
result
|
||||
.append(api.lineBuilder.first(openTag.start + actualName))
|
||||
.append(properties.first.actual.stripFlags())
|
||||
.append(properties.first.expected.stripFlags())
|
||||
} else {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName))
|
||||
.append(properties.first.actual.stripFlags())
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
|
||||
.append(properties.first.expected.stripFlags())
|
||||
}
|
||||
result.append(properties.remaining.stripFlags())
|
||||
|
||||
if (actual.hasChildren && expected.hasChildren) {
|
||||
result
|
||||
.append(properties.last.actual.stripFlags())
|
||||
.append(properties.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.line(indent + openTag.end))
|
||||
|
||||
if (namesAreEqual) {
|
||||
result
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
} else {
|
||||
result
|
||||
.append(children.first.actual.stripFlags())
|
||||
.append(children.first.expected.stripFlags())
|
||||
.append(children.remaining.stripFlags())
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
}
|
||||
} else if (actual.hasChildren) {
|
||||
result
|
||||
.append(properties.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.line(indent + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(properties.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
|
||||
} else if (expected.hasChildren) {
|
||||
result
|
||||
.append(properties.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
|
||||
.append(properties.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.line(indent + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
} else {
|
||||
result
|
||||
.append(properties.last.actual.stripFlags())
|
||||
.append(properties.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
|
||||
}
|
||||
} else if (actual.hasProperties) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName))
|
||||
.append(allProperties.stripFlags())
|
||||
|
||||
if (actual.hasChildren && expected.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.line(indent + openTag.end))
|
||||
.append(children.first.actual.stripFlags())
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
|
||||
.append(children.first.expected.stripFlags())
|
||||
.append(children.remaining.stripFlags())
|
||||
|
||||
if (namesAreEqual) {
|
||||
result
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
} else {
|
||||
result
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
}
|
||||
} else if (actual.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
|
||||
} else if (expected.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
} else {
|
||||
result
|
||||
.append(api.lineBuilder.actual.last(indent + openTag.selfClose + openTag.end))
|
||||
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
|
||||
}
|
||||
} else if (expected.hasProperties) {
|
||||
if (actual.hasChildren && expected.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
|
||||
.append(children.first.actual.stripFlags())
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
|
||||
.append(allProperties.stripFlags())
|
||||
.append(api.lineBuilder.expected.line(indent + openTag.end))
|
||||
.append(children.first.expected.stripFlags())
|
||||
.append(children.remaining.stripFlags())
|
||||
|
||||
if (namesAreEqual) {
|
||||
result
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
} else {
|
||||
result
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
}
|
||||
} else if (actual.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
|
||||
.append(allProperties.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + openTag.selfClose + openTag.end))
|
||||
} else if (expected.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
|
||||
.append(allProperties.stripFlags())
|
||||
.append(api.lineBuilder.expected.line(indent + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
} else {
|
||||
result
|
||||
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName))
|
||||
.append(allProperties.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + openTag.selfCloseVoid + openTag.end))
|
||||
}
|
||||
} else {
|
||||
if (actual.hasChildren && expected.hasChildren) {
|
||||
if (namesAreEqual) {
|
||||
result
|
||||
.append(api.lineBuilder.first(openTag.start + actualName + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
} else {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
|
||||
.append(children.first.actual.stripFlags())
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
|
||||
.append(children.first.expected.stripFlags())
|
||||
.append(children.remaining.stripFlags())
|
||||
.append(children.last.actual.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(children.last.expected.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, expectedName)))
|
||||
}
|
||||
} else if (actual.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.first(openTag.start + actualName + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.actual.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
|
||||
} else if (expected.hasChildren) {
|
||||
result
|
||||
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
|
||||
.append(api.lineBuilder.expected.first(openTag.start + expectedName + openTag.end))
|
||||
.append(allChildren.stripFlags())
|
||||
.append(api.lineBuilder.expected.last(indent + api.wrapFromTheme(theme.react.closeTag, actualName)))
|
||||
} else {
|
||||
if (namesAreEqual) {
|
||||
result.append(api.lineBuilder.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
|
||||
} else {
|
||||
result
|
||||
.append(api.lineBuilder.actual.single(openTag.start + actualName + openTag.selfCloseVoid + openTag.end))
|
||||
.append(api.lineBuilder.expected.single(openTag.start + expectedName + openTag.selfCloseVoid + openTag.end))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
|
||||
shouldFormat (subject) {
|
||||
return subject.isItem === true || subject.isProperty === true
|
||||
},
|
||||
|
||||
increaseIndent: true
|
||||
}
|
||||
}
|
||||
module.exports = diffShallow
|
||||
353
node_modules/@concordance/react/lib/elementFactory.js
generated
vendored
353
node_modules/@concordance/react/lib/elementFactory.js
generated
vendored
|
|
@ -1,353 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const arrify = require('arrify')
|
||||
const diffShallow = require('./diffShallow')
|
||||
const escapeText = require('./escapeText')
|
||||
|
||||
const FRAGMENT_NAME = Symbol.for('react.fragment')
|
||||
|
||||
function factory (api, reactTags) {
|
||||
const tag = Symbol('@concordance/react.ElementValue')
|
||||
|
||||
function customPropertyFormatter (theme, indent, key, value) {
|
||||
const separator = theme.react.attribute.separator + theme.react.attribute.value.openBracket
|
||||
if (value.isSingle) {
|
||||
return value
|
||||
.withFirstPrefixed(key.formatAsKey(theme) + separator)
|
||||
.withLastPostfixed(theme.react.attribute.value.closeBracket)
|
||||
}
|
||||
|
||||
return api.lineBuilder.first(key.formatAsKey(theme) + separator)
|
||||
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
|
||||
.append(api.lineBuilder.last(indent + theme.react.attribute.value.closeBracket))
|
||||
}
|
||||
|
||||
function themeProperty (theme) {
|
||||
theme.property.increaseValueIndent = true
|
||||
theme.property.customFormat = customPropertyFormatter
|
||||
}
|
||||
|
||||
function themeStringProperty (theme) {
|
||||
theme.property.separator = theme.react.attribute.separator
|
||||
theme.property.after = ''
|
||||
Object.assign(theme.string.line, theme.react.attribute.value.string.line)
|
||||
}
|
||||
|
||||
function customItemFormatter (theme, indent, value) {
|
||||
if (value.isSingle) {
|
||||
return value
|
||||
.withFirstPrefixed(theme.react.child.openBracket)
|
||||
.withLastPostfixed(theme.react.child.closeBracket)
|
||||
}
|
||||
|
||||
return api.lineBuilder.first(theme.react.child.openBracket)
|
||||
.concat(value.withFirstPrefixed(indent.increase()).stripFlags())
|
||||
.append(api.lineBuilder.last(indent + theme.react.child.closeBracket))
|
||||
}
|
||||
|
||||
function themeChild (theme) {
|
||||
theme.item.increaseValueIndent = true
|
||||
theme.item.customFormat = customItemFormatter
|
||||
}
|
||||
|
||||
function themeReactChild (theme) {
|
||||
theme.item.after = ''
|
||||
}
|
||||
|
||||
function themeStringChild (theme) {
|
||||
theme.item.after = ''
|
||||
Object.assign(theme.string, theme.react.child.string)
|
||||
}
|
||||
|
||||
function describe (props) {
|
||||
const element = props.value
|
||||
|
||||
const type = element.type
|
||||
const hasTypeFn = typeof type === 'function'
|
||||
const typeFn = hasTypeFn ? type : null
|
||||
const name = hasTypeFn ? type.displayName || type.name : type
|
||||
|
||||
const children = arrify(element.props.children)
|
||||
|
||||
const properties = Object.assign({}, element.props)
|
||||
delete properties.children
|
||||
if (element.key !== null) {
|
||||
properties.key = element.key
|
||||
}
|
||||
const hasProperties = Object.keys(properties).length > 0
|
||||
|
||||
return new DescribedElementValue(Object.assign({
|
||||
children,
|
||||
hasProperties,
|
||||
hasTypeFn,
|
||||
name,
|
||||
properties,
|
||||
typeFn,
|
||||
isList: children.length > 0
|
||||
}, props))
|
||||
}
|
||||
|
||||
function deserialize (state, recursor) {
|
||||
return new DeserializedElementValue(state, recursor)
|
||||
}
|
||||
|
||||
class ElementValue extends api.ObjectValue {
|
||||
constructor (props) {
|
||||
super(props)
|
||||
this.isFragment = props.name === FRAGMENT_NAME
|
||||
this.name = props.name
|
||||
this.hasProperties = props.hasProperties
|
||||
this.hasTypeFn = props.hasTypeFn
|
||||
|
||||
this.hasChildren = this.isList
|
||||
}
|
||||
|
||||
compare (expected) {
|
||||
return this.tag === expected.tag && this.name === expected.name
|
||||
? api.SHALLOW_EQUAL
|
||||
: api.UNEQUAL
|
||||
}
|
||||
|
||||
formatName (theme) {
|
||||
const formatted = api.wrapFromTheme(theme.react.tagName, this.isFragment ? 'React.Fragment' : this.name)
|
||||
return this.hasTypeFn
|
||||
? formatted + theme.react.functionType
|
||||
: formatted
|
||||
}
|
||||
|
||||
compareNames (expected) {
|
||||
return this.name === expected.name && this.hasTypeFn === expected.hasTypeFn
|
||||
}
|
||||
|
||||
formatShallow (theme, indent) {
|
||||
const childBuffer = api.lineBuilder.buffer()
|
||||
const propertyBuffer = api.lineBuilder.buffer()
|
||||
|
||||
return {
|
||||
append (formatted, origin) {
|
||||
if (origin.isItem === true) {
|
||||
childBuffer.append(formatted)
|
||||
} else {
|
||||
propertyBuffer.append(formatted)
|
||||
}
|
||||
},
|
||||
|
||||
finalize: () => {
|
||||
const name = this.formatName(theme)
|
||||
const openTag = theme.react.openTag
|
||||
|
||||
if (!this.hasChildren && !this.hasProperties) {
|
||||
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
|
||||
}
|
||||
|
||||
const innerIndentation = indent.increase()
|
||||
const children = childBuffer.withFirstPrefixed(innerIndentation).stripFlags()
|
||||
const properties = propertyBuffer.withFirstPrefixed(innerIndentation).stripFlags()
|
||||
|
||||
const result = api.lineBuilder.buffer()
|
||||
if (this.hasProperties) {
|
||||
result
|
||||
.append(api.lineBuilder.first(openTag.start + name))
|
||||
.append(properties)
|
||||
|
||||
if (this.hasChildren) {
|
||||
result.append(api.lineBuilder.line(indent + openTag.end))
|
||||
} else {
|
||||
result.append(api.lineBuilder.last(indent + openTag.selfClose + openTag.end))
|
||||
}
|
||||
} else {
|
||||
result.append(api.lineBuilder.first(openTag.start + name + openTag.end))
|
||||
}
|
||||
|
||||
if (this.hasChildren) {
|
||||
result
|
||||
.append(children)
|
||||
.append(api.lineBuilder.last(indent + api.wrapFromTheme(theme.react.closeTag, name)))
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
|
||||
maxDepth: () => {
|
||||
const name = this.formatName(theme)
|
||||
const openTag = theme.react.openTag
|
||||
|
||||
if (!this.hasChildren && !this.hasProperties) {
|
||||
return api.lineBuilder.single(openTag.start + name + openTag.selfCloseVoid + openTag.end)
|
||||
}
|
||||
|
||||
let str = openTag.start + name
|
||||
if (this.hasProperties) {
|
||||
str += theme.maxDepth
|
||||
if (this.hasChildren) {
|
||||
str += openTag.end
|
||||
} else {
|
||||
str += ' ' + openTag.selfClose + openTag.end
|
||||
}
|
||||
} else {
|
||||
str += openTag.end
|
||||
}
|
||||
|
||||
if (this.hasChildren) {
|
||||
str += theme.maxDepth + api.wrapFromTheme(theme.react.closeTag, name)
|
||||
}
|
||||
|
||||
return api.lineBuilder.single(str)
|
||||
},
|
||||
|
||||
shouldFormat (subject) {
|
||||
return subject.isItem === true || subject.isProperty === true
|
||||
},
|
||||
|
||||
increaseIndent: true
|
||||
}
|
||||
}
|
||||
|
||||
prepareDiff (expected) {
|
||||
return {
|
||||
compareResult: this.tag === expected.tag
|
||||
? api.SHALLOW_EQUAL
|
||||
: api.UNEQUAL
|
||||
}
|
||||
}
|
||||
|
||||
diffShallow (expected, theme, indent) {
|
||||
return diffShallow(api, this, expected, theme, indent)
|
||||
}
|
||||
|
||||
serialize () {
|
||||
return [this.isFragment, this.isFragment ? null : this.name, this.hasProperties, this.hasTypeFn, super.serialize()]
|
||||
}
|
||||
}
|
||||
Object.defineProperty(ElementValue.prototype, 'tag', {value: tag})
|
||||
|
||||
function modifyThemes (recursor) {
|
||||
return api.mapRecursor(recursor, next => {
|
||||
let modifier
|
||||
if (next.isItem === true) {
|
||||
if (next.tag === api.descriptorTags.primitiveItem && next.value.tag === api.descriptorTags.string) {
|
||||
modifier = themeStringChild
|
||||
} else if (next.tag === api.descriptorTags.complexItem && reactTags.has(next.value.tag)) {
|
||||
modifier = themeReactChild
|
||||
} else {
|
||||
modifier = themeChild
|
||||
}
|
||||
} else if (next.isProperty === true) {
|
||||
if (
|
||||
next.tag === api.descriptorTags.primitiveProperty &&
|
||||
next.value.tag === api.descriptorTags.string &&
|
||||
!next.value.includesLinebreaks
|
||||
) {
|
||||
modifier = themeStringProperty
|
||||
} else {
|
||||
modifier = themeProperty
|
||||
}
|
||||
}
|
||||
|
||||
return modifier
|
||||
? api.modifyTheme(next, modifier)
|
||||
: next
|
||||
})
|
||||
}
|
||||
|
||||
function DescribedMixin (base) {
|
||||
return class extends api.DescribedMixin(base) {
|
||||
constructor (props) {
|
||||
super(props)
|
||||
this.children = props.children
|
||||
this.properties = props.properties
|
||||
this.typeFn = props.typeFn
|
||||
}
|
||||
|
||||
compare (expected) {
|
||||
const result = super.compare(expected)
|
||||
return result === api.SHALLOW_EQUAL && this.typeFn !== expected.typeFn
|
||||
? api.UNEQUAL
|
||||
: result
|
||||
}
|
||||
|
||||
compareNames (expected) {
|
||||
return super.compareNames(expected) && this.typeFn === expected.typeFn
|
||||
}
|
||||
|
||||
createPropertyRecursor () {
|
||||
// Symbols are not valid property keys for React elements. This code
|
||||
// also assumes that the keys can be formatted as JSX-like attribute
|
||||
// names. Keys are not pre-escaped before being passed to Concordance's
|
||||
// property descriptor.
|
||||
const keys = Object.keys(this.properties).sort()
|
||||
const size = keys.length
|
||||
|
||||
let index = 0
|
||||
const next = () => {
|
||||
if (index === size) return null
|
||||
|
||||
const key = keys[index++]
|
||||
// Note that string values are not specifically escaped such that the
|
||||
// output is valid JSX.
|
||||
return this.describeProperty(key, this.describeAny(this.properties[key]))
|
||||
}
|
||||
|
||||
return {size, next}
|
||||
}
|
||||
|
||||
createListRecursor () {
|
||||
if (!this.isList) return super.createListRecursor()
|
||||
|
||||
const size = this.children.length
|
||||
|
||||
let index = 0
|
||||
const next = () => {
|
||||
if (index === size) return null
|
||||
|
||||
const current = index++
|
||||
const child = this.children[current]
|
||||
const type = typeof child
|
||||
let descriptor
|
||||
if (type === 'string') {
|
||||
descriptor = this.describeAny(escapeText(child))
|
||||
} else {
|
||||
descriptor = this.describeAny(child)
|
||||
}
|
||||
|
||||
return this.describeItem(current, descriptor)
|
||||
}
|
||||
|
||||
return {size, next}
|
||||
}
|
||||
|
||||
createRecursor () {
|
||||
return modifyThemes(super.createRecursor())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function DeserializedMixin (base) {
|
||||
return class extends api.DeserializedMixin(base) {
|
||||
constructor (state, recursor) {
|
||||
super(state[4], recursor)
|
||||
this.isFragment = state[0]
|
||||
this.name = this.isFragment ? FRAGMENT_NAME : state[1]
|
||||
this.hasProperties = state[2]
|
||||
this.hasTypeFn = state[3]
|
||||
}
|
||||
|
||||
createRecursor () {
|
||||
return modifyThemes(super.createRecursor())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DescribedElementValue = DescribedMixin(ElementValue)
|
||||
const DeserializedElementValue = DeserializedMixin(ElementValue)
|
||||
|
||||
return {
|
||||
DescribedMixin,
|
||||
DeserializedMixin,
|
||||
ElementValue,
|
||||
describe,
|
||||
deserialize,
|
||||
tag
|
||||
}
|
||||
}
|
||||
module.exports = factory
|
||||
10
node_modules/@concordance/react/lib/escapeText.js
generated
vendored
10
node_modules/@concordance/react/lib/escapeText.js
generated
vendored
|
|
@ -1,10 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
function escapeText (text) {
|
||||
return text
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
// TODO: Escape characters that Concordance would otherwise replace with \u
|
||||
// sequences.
|
||||
}
|
||||
module.exports = escapeText
|
||||
59
node_modules/@concordance/react/lib/testJsonFactory.js
generated
vendored
59
node_modules/@concordance/react/lib/testJsonFactory.js
generated
vendored
|
|
@ -1,59 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const arrify = require('arrify')
|
||||
|
||||
function factory (api, element) {
|
||||
const tag = Symbol('@concordance/react.TestJsonValue')
|
||||
|
||||
function describe (props) {
|
||||
const obj = props.value
|
||||
|
||||
const name = obj.type
|
||||
const children = arrify(obj.children)
|
||||
const properties = Object.assign({}, obj.props)
|
||||
const hasProperties = Object.keys(properties).length > 0
|
||||
|
||||
return new DescribedTestJsonValue(Object.assign({
|
||||
children,
|
||||
hasProperties,
|
||||
hasTypeFn: false,
|
||||
name,
|
||||
properties,
|
||||
typeFn: null,
|
||||
isList: children.length > 0
|
||||
}, props))
|
||||
}
|
||||
|
||||
function deserialize (state, recursor) {
|
||||
return new DeserializedTestJsonValue(state, recursor)
|
||||
}
|
||||
|
||||
class TestJsonValue extends element.ElementValue {
|
||||
compare (expected) {
|
||||
// Allow expected value to be a React element.
|
||||
return (this.tag === expected.tag || expected.tag === element.tag) && this.name === expected.name
|
||||
? api.SHALLOW_EQUAL
|
||||
: api.UNEQUAL
|
||||
}
|
||||
|
||||
prepareDiff (expected) {
|
||||
return {
|
||||
// Allow expected value to be a React element.
|
||||
compareResult: this.tag === expected.tag || expected.tag === element.tag
|
||||
? api.SHALLOW_EQUAL
|
||||
: api.UNEQUAL
|
||||
}
|
||||
}
|
||||
}
|
||||
Object.defineProperty(TestJsonValue.prototype, 'tag', {value: tag})
|
||||
|
||||
const DescribedTestJsonValue = element.DescribedMixin(TestJsonValue)
|
||||
const DeserializedTestJsonValue = element.DeserializedMixin(TestJsonValue)
|
||||
|
||||
return {
|
||||
describe,
|
||||
deserialize,
|
||||
tag
|
||||
}
|
||||
}
|
||||
module.exports = factory
|
||||
8
node_modules/@concordance/react/node_modules/arrify/index.js
generated
vendored
8
node_modules/@concordance/react/node_modules/arrify/index.js
generated
vendored
|
|
@ -1,8 +0,0 @@
|
|||
'use strict';
|
||||
module.exports = function (val) {
|
||||
if (val === null || val === undefined) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Array.isArray(val) ? val : [val];
|
||||
};
|
||||
21
node_modules/@concordance/react/node_modules/arrify/license
generated
vendored
21
node_modules/@concordance/react/node_modules/arrify/license
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
36
node_modules/@concordance/react/node_modules/arrify/readme.md
generated
vendored
36
node_modules/@concordance/react/node_modules/arrify/readme.md
generated
vendored
|
|
@ -1,36 +0,0 @@
|
|||
# arrify [](https://travis-ci.org/sindresorhus/arrify)
|
||||
|
||||
> Convert a value to an array
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save arrify
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const arrify = require('arrify');
|
||||
|
||||
arrify('unicorn');
|
||||
//=> ['unicorn']
|
||||
|
||||
arrify(['unicorn']);
|
||||
//=> ['unicorn']
|
||||
|
||||
arrify(null);
|
||||
//=> []
|
||||
|
||||
arrify(undefined);
|
||||
//=> []
|
||||
```
|
||||
|
||||
*Supplying `null` or `undefined` results in an empty array.*
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
||||
75
node_modules/@concordance/react/package.json
generated
vendored
75
node_modules/@concordance/react/package.json
generated
vendored
|
|
@ -1,75 +0,0 @@
|
|||
{
|
||||
"name": "@concordance/react",
|
||||
"version": "2.0.0",
|
||||
"description": "Compare, format, diff and serialize React trees with Concordance",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"lib",
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6.12.3 <7 || >=8.9.4 <9 || >=10.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build:fixtures": "babel --presets=module:@babel/preset-react,module:ava/stage-4 --out-dir=test/fixtures/react --extensions=.jsx test/fixtures/react",
|
||||
"lint": "as-i-preach",
|
||||
"pretest": "npm run -s build:fixtures",
|
||||
"test": "npm run -s lint && nyc ava"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/concordancejs/react.git"
|
||||
},
|
||||
"author": "Mark Wubben (https://novemberborn.net/)",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/concordancejs/react/issues"
|
||||
},
|
||||
"homepage": "https://github.com/concordancejs/react#readme",
|
||||
"keywords": [
|
||||
"concordance-plugin",
|
||||
"concordance",
|
||||
"react"
|
||||
],
|
||||
"dependencies": {
|
||||
"arrify": "^1.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.1.0",
|
||||
"@babel/core": "^7.1.0",
|
||||
"@babel/preset-react": "^7.0.0",
|
||||
"@novemberborn/as-i-preach": "^10.1.0",
|
||||
"ava": "1.0.0-beta.8",
|
||||
"codecov": "^3.1.0",
|
||||
"concordance": "^4.0.0",
|
||||
"nyc": "^13.0.1",
|
||||
"react": "^16.5.2",
|
||||
"react-test-renderer": "^16.5.2"
|
||||
},
|
||||
"as-i-preach": {
|
||||
"allowDevDependencies": [
|
||||
"test/**/*.js",
|
||||
"test/**/*.jsx"
|
||||
],
|
||||
"ignore": [
|
||||
"test/fixtures/react/*.js"
|
||||
]
|
||||
},
|
||||
"ava": {
|
||||
"babel": {
|
||||
"testOptions": {
|
||||
"presets": [
|
||||
"module:@babel/preset-react"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"nyc": {
|
||||
"reporter": [
|
||||
"html",
|
||||
"lcov",
|
||||
"text"
|
||||
]
|
||||
},
|
||||
"standard-engine": "@novemberborn/as-i-preach"
|
||||
}
|
||||
132
node_modules/@sindresorhus/is/dist/index.d.ts
generated
vendored
132
node_modules/@sindresorhus/is/dist/index.d.ts
generated
vendored
|
|
@ -1,132 +0,0 @@
|
|||
/// <reference types="node" />
|
||||
/// <reference lib="es2016" />
|
||||
/// <reference lib="es2017.sharedmemory" />
|
||||
/// <reference lib="esnext.asynciterable" />
|
||||
/// <reference lib="dom" />
|
||||
declare type TypedArray = Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array;
|
||||
declare type Primitive = null | undefined | string | number | boolean | Symbol;
|
||||
export interface ArrayLike {
|
||||
length: number;
|
||||
}
|
||||
export interface Class<T = unknown> {
|
||||
new (...args: any[]): T;
|
||||
}
|
||||
declare type DomElement = object & {
|
||||
nodeType: 1;
|
||||
nodeName: string;
|
||||
};
|
||||
declare type NodeStream = object & {
|
||||
pipe: Function;
|
||||
};
|
||||
export declare const enum TypeName {
|
||||
null = "null",
|
||||
boolean = "boolean",
|
||||
undefined = "undefined",
|
||||
string = "string",
|
||||
number = "number",
|
||||
symbol = "symbol",
|
||||
Function = "Function",
|
||||
GeneratorFunction = "GeneratorFunction",
|
||||
AsyncFunction = "AsyncFunction",
|
||||
Observable = "Observable",
|
||||
Array = "Array",
|
||||
Buffer = "Buffer",
|
||||
Object = "Object",
|
||||
RegExp = "RegExp",
|
||||
Date = "Date",
|
||||
Error = "Error",
|
||||
Map = "Map",
|
||||
Set = "Set",
|
||||
WeakMap = "WeakMap",
|
||||
WeakSet = "WeakSet",
|
||||
Int8Array = "Int8Array",
|
||||
Uint8Array = "Uint8Array",
|
||||
Uint8ClampedArray = "Uint8ClampedArray",
|
||||
Int16Array = "Int16Array",
|
||||
Uint16Array = "Uint16Array",
|
||||
Int32Array = "Int32Array",
|
||||
Uint32Array = "Uint32Array",
|
||||
Float32Array = "Float32Array",
|
||||
Float64Array = "Float64Array",
|
||||
ArrayBuffer = "ArrayBuffer",
|
||||
SharedArrayBuffer = "SharedArrayBuffer",
|
||||
DataView = "DataView",
|
||||
Promise = "Promise",
|
||||
URL = "URL"
|
||||
}
|
||||
declare function is(value: unknown): TypeName;
|
||||
declare namespace is {
|
||||
const undefined: (value: unknown) => value is undefined;
|
||||
const string: (value: unknown) => value is string;
|
||||
const number: (value: unknown) => value is number;
|
||||
const function_: (value: unknown) => value is Function;
|
||||
const null_: (value: unknown) => value is null;
|
||||
const class_: (value: unknown) => value is Class<unknown>;
|
||||
const boolean: (value: unknown) => value is boolean;
|
||||
const symbol: (value: unknown) => value is Symbol;
|
||||
const numericString: (value: unknown) => boolean;
|
||||
const array: (arg: any) => arg is any[];
|
||||
const buffer: (input: unknown) => input is Buffer;
|
||||
const nullOrUndefined: (value: unknown) => value is null | undefined;
|
||||
const object: (value: unknown) => value is object;
|
||||
const iterable: (value: unknown) => value is IterableIterator<unknown>;
|
||||
const asyncIterable: (value: unknown) => value is AsyncIterableIterator<unknown>;
|
||||
const generator: (value: unknown) => value is Generator;
|
||||
const nativePromise: (value: unknown) => value is Promise<unknown>;
|
||||
const promise: (value: unknown) => value is Promise<unknown>;
|
||||
const generatorFunction: (value: unknown) => value is GeneratorFunction;
|
||||
const asyncFunction: (value: unknown) => value is Function;
|
||||
const boundFunction: (value: unknown) => value is Function;
|
||||
const regExp: (value: unknown) => value is RegExp;
|
||||
const date: (value: unknown) => value is Date;
|
||||
const error: (value: unknown) => value is Error;
|
||||
const map: (value: unknown) => value is Map<unknown, unknown>;
|
||||
const set: (value: unknown) => value is Set<unknown>;
|
||||
const weakMap: (value: unknown) => value is WeakMap<object, unknown>;
|
||||
const weakSet: (value: unknown) => value is WeakSet<object>;
|
||||
const int8Array: (value: unknown) => value is Int8Array;
|
||||
const uint8Array: (value: unknown) => value is Uint8Array;
|
||||
const uint8ClampedArray: (value: unknown) => value is Uint8ClampedArray;
|
||||
const int16Array: (value: unknown) => value is Int16Array;
|
||||
const uint16Array: (value: unknown) => value is Uint16Array;
|
||||
const int32Array: (value: unknown) => value is Int32Array;
|
||||
const uint32Array: (value: unknown) => value is Uint32Array;
|
||||
const float32Array: (value: unknown) => value is Float32Array;
|
||||
const float64Array: (value: unknown) => value is Float64Array;
|
||||
const arrayBuffer: (value: unknown) => value is ArrayBuffer;
|
||||
const sharedArrayBuffer: (value: unknown) => value is SharedArrayBuffer;
|
||||
const dataView: (value: unknown) => value is DataView;
|
||||
const directInstanceOf: <T>(instance: unknown, klass: Class<T>) => instance is T;
|
||||
const urlInstance: (value: unknown) => value is URL;
|
||||
const urlString: (value: unknown) => boolean;
|
||||
const truthy: (value: unknown) => boolean;
|
||||
const falsy: (value: unknown) => boolean;
|
||||
const nan: (value: unknown) => boolean;
|
||||
const primitive: (value: unknown) => value is Primitive;
|
||||
const integer: (value: unknown) => value is number;
|
||||
const safeInteger: (value: unknown) => value is number;
|
||||
const plainObject: (value: unknown) => boolean;
|
||||
const typedArray: (value: unknown) => value is TypedArray;
|
||||
const arrayLike: (value: unknown) => value is ArrayLike;
|
||||
const inRange: (value: number, range: number | number[]) => boolean;
|
||||
const domElement: (value: unknown) => value is DomElement;
|
||||
const observable: (value: unknown) => boolean;
|
||||
const nodeStream: (value: unknown) => value is NodeStream;
|
||||
const infinite: (value: unknown) => boolean;
|
||||
const even: (value: number) => boolean;
|
||||
const odd: (value: number) => boolean;
|
||||
const emptyArray: (value: unknown) => boolean;
|
||||
const nonEmptyArray: (value: unknown) => boolean;
|
||||
const emptyString: (value: unknown) => boolean;
|
||||
const nonEmptyString: (value: unknown) => boolean;
|
||||
const emptyStringOrWhitespace: (value: unknown) => boolean;
|
||||
const emptyObject: (value: unknown) => boolean;
|
||||
const nonEmptyObject: (value: unknown) => boolean;
|
||||
const emptySet: (value: unknown) => boolean;
|
||||
const nonEmptySet: (value: unknown) => boolean;
|
||||
const emptyMap: (value: unknown) => boolean;
|
||||
const nonEmptyMap: (value: unknown) => boolean;
|
||||
const any: (predicate: unknown, ...values: unknown[]) => boolean;
|
||||
const all: (predicate: unknown, ...values: unknown[]) => boolean;
|
||||
}
|
||||
export default is;
|
||||
245
node_modules/@sindresorhus/is/dist/index.js
generated
vendored
245
node_modules/@sindresorhus/is/dist/index.js
generated
vendored
|
|
@ -1,245 +0,0 @@
|
|||
"use strict";
|
||||
/// <reference lib="es2016"/>
|
||||
/// <reference lib="es2017.sharedmemory"/>
|
||||
/// <reference lib="esnext.asynciterable"/>
|
||||
/// <reference lib="dom"/>
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// TODO: Use the `URL` global when targeting Node.js 10
|
||||
// tslint:disable-next-line
|
||||
const URLGlobal = typeof URL === 'undefined' ? require('url').URL : URL;
|
||||
const toString = Object.prototype.toString;
|
||||
const isOfType = (type) => (value) => typeof value === type;
|
||||
const isBuffer = (input) => !is.nullOrUndefined(input) && !is.nullOrUndefined(input.constructor) && is.function_(input.constructor.isBuffer) && input.constructor.isBuffer(input);
|
||||
const getObjectType = (value) => {
|
||||
const objectName = toString.call(value).slice(8, -1);
|
||||
if (objectName) {
|
||||
return objectName;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
const isObjectOfType = (type) => (value) => getObjectType(value) === type;
|
||||
function is(value) {
|
||||
switch (value) {
|
||||
case null:
|
||||
return "null" /* null */;
|
||||
case true:
|
||||
case false:
|
||||
return "boolean" /* boolean */;
|
||||
default:
|
||||
}
|
||||
switch (typeof value) {
|
||||
case 'undefined':
|
||||
return "undefined" /* undefined */;
|
||||
case 'string':
|
||||
return "string" /* string */;
|
||||
case 'number':
|
||||
return "number" /* number */;
|
||||
case 'symbol':
|
||||
return "symbol" /* symbol */;
|
||||
default:
|
||||
}
|
||||
if (is.function_(value)) {
|
||||
return "Function" /* Function */;
|
||||
}
|
||||
if (is.observable(value)) {
|
||||
return "Observable" /* Observable */;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
return "Array" /* Array */;
|
||||
}
|
||||
if (isBuffer(value)) {
|
||||
return "Buffer" /* Buffer */;
|
||||
}
|
||||
const tagType = getObjectType(value);
|
||||
if (tagType) {
|
||||
return tagType;
|
||||
}
|
||||
if (value instanceof String || value instanceof Boolean || value instanceof Number) {
|
||||
throw new TypeError('Please don\'t use object wrappers for primitive types');
|
||||
}
|
||||
return "Object" /* Object */;
|
||||
}
|
||||
(function (is) {
|
||||
// tslint:disable-next-line:strict-type-predicates
|
||||
const isObject = (value) => typeof value === 'object';
|
||||
// tslint:disable:variable-name
|
||||
is.undefined = isOfType('undefined');
|
||||
is.string = isOfType('string');
|
||||
is.number = isOfType('number');
|
||||
is.function_ = isOfType('function');
|
||||
// tslint:disable-next-line:strict-type-predicates
|
||||
is.null_ = (value) => value === null;
|
||||
is.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');
|
||||
is.boolean = (value) => value === true || value === false;
|
||||
is.symbol = isOfType('symbol');
|
||||
// tslint:enable:variable-name
|
||||
is.numericString = (value) => is.string(value) && value.length > 0 && !Number.isNaN(Number(value));
|
||||
is.array = Array.isArray;
|
||||
is.buffer = isBuffer;
|
||||
is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);
|
||||
is.object = (value) => !is.nullOrUndefined(value) && (is.function_(value) || isObject(value));
|
||||
is.iterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.iterator]);
|
||||
is.asyncIterable = (value) => !is.nullOrUndefined(value) && is.function_(value[Symbol.asyncIterator]);
|
||||
is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);
|
||||
is.nativePromise = (value) => isObjectOfType("Promise" /* Promise */)(value);
|
||||
const hasPromiseAPI = (value) => !is.null_(value) &&
|
||||
isObject(value) &&
|
||||
is.function_(value.then) &&
|
||||
is.function_(value.catch);
|
||||
is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);
|
||||
is.generatorFunction = isObjectOfType("GeneratorFunction" /* GeneratorFunction */);
|
||||
is.asyncFunction = isObjectOfType("AsyncFunction" /* AsyncFunction */);
|
||||
is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');
|
||||
is.regExp = isObjectOfType("RegExp" /* RegExp */);
|
||||
is.date = isObjectOfType("Date" /* Date */);
|
||||
is.error = isObjectOfType("Error" /* Error */);
|
||||
is.map = (value) => isObjectOfType("Map" /* Map */)(value);
|
||||
is.set = (value) => isObjectOfType("Set" /* Set */)(value);
|
||||
is.weakMap = (value) => isObjectOfType("WeakMap" /* WeakMap */)(value);
|
||||
is.weakSet = (value) => isObjectOfType("WeakSet" /* WeakSet */)(value);
|
||||
is.int8Array = isObjectOfType("Int8Array" /* Int8Array */);
|
||||
is.uint8Array = isObjectOfType("Uint8Array" /* Uint8Array */);
|
||||
is.uint8ClampedArray = isObjectOfType("Uint8ClampedArray" /* Uint8ClampedArray */);
|
||||
is.int16Array = isObjectOfType("Int16Array" /* Int16Array */);
|
||||
is.uint16Array = isObjectOfType("Uint16Array" /* Uint16Array */);
|
||||
is.int32Array = isObjectOfType("Int32Array" /* Int32Array */);
|
||||
is.uint32Array = isObjectOfType("Uint32Array" /* Uint32Array */);
|
||||
is.float32Array = isObjectOfType("Float32Array" /* Float32Array */);
|
||||
is.float64Array = isObjectOfType("Float64Array" /* Float64Array */);
|
||||
is.arrayBuffer = isObjectOfType("ArrayBuffer" /* ArrayBuffer */);
|
||||
is.sharedArrayBuffer = isObjectOfType("SharedArrayBuffer" /* SharedArrayBuffer */);
|
||||
is.dataView = isObjectOfType("DataView" /* DataView */);
|
||||
is.directInstanceOf = (instance, klass) => Object.getPrototypeOf(instance) === klass.prototype;
|
||||
is.urlInstance = (value) => isObjectOfType("URL" /* URL */)(value);
|
||||
is.urlString = (value) => {
|
||||
if (!is.string(value)) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
new URLGlobal(value); // tslint:disable-line no-unused-expression
|
||||
return true;
|
||||
}
|
||||
catch (_a) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
is.truthy = (value) => Boolean(value);
|
||||
is.falsy = (value) => !value;
|
||||
is.nan = (value) => Number.isNaN(value);
|
||||
const primitiveTypes = new Set([
|
||||
'undefined',
|
||||
'string',
|
||||
'number',
|
||||
'boolean',
|
||||
'symbol'
|
||||
]);
|
||||
is.primitive = (value) => is.null_(value) || primitiveTypes.has(typeof value);
|
||||
is.integer = (value) => Number.isInteger(value);
|
||||
is.safeInteger = (value) => Number.isSafeInteger(value);
|
||||
is.plainObject = (value) => {
|
||||
// From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js
|
||||
let prototype;
|
||||
return getObjectType(value) === "Object" /* Object */ &&
|
||||
(prototype = Object.getPrototypeOf(value), prototype === null || // tslint:disable-line:ban-comma-operator
|
||||
prototype === Object.getPrototypeOf({}));
|
||||
};
|
||||
const typedArrayTypes = new Set([
|
||||
"Int8Array" /* Int8Array */,
|
||||
"Uint8Array" /* Uint8Array */,
|
||||
"Uint8ClampedArray" /* Uint8ClampedArray */,
|
||||
"Int16Array" /* Int16Array */,
|
||||
"Uint16Array" /* Uint16Array */,
|
||||
"Int32Array" /* Int32Array */,
|
||||
"Uint32Array" /* Uint32Array */,
|
||||
"Float32Array" /* Float32Array */,
|
||||
"Float64Array" /* Float64Array */
|
||||
]);
|
||||
is.typedArray = (value) => {
|
||||
const objectType = getObjectType(value);
|
||||
if (objectType === null) {
|
||||
return false;
|
||||
}
|
||||
return typedArrayTypes.has(objectType);
|
||||
};
|
||||
const isValidLength = (value) => is.safeInteger(value) && value > -1;
|
||||
is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);
|
||||
is.inRange = (value, range) => {
|
||||
if (is.number(range)) {
|
||||
return value >= Math.min(0, range) && value <= Math.max(range, 0);
|
||||
}
|
||||
if (is.array(range) && range.length === 2) {
|
||||
return value >= Math.min(...range) && value <= Math.max(...range);
|
||||
}
|
||||
throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
|
||||
};
|
||||
const NODE_TYPE_ELEMENT = 1;
|
||||
const DOM_PROPERTIES_TO_CHECK = [
|
||||
'innerHTML',
|
||||
'ownerDocument',
|
||||
'style',
|
||||
'attributes',
|
||||
'nodeValue'
|
||||
];
|
||||
is.domElement = (value) => is.object(value) && value.nodeType === NODE_TYPE_ELEMENT && is.string(value.nodeName) &&
|
||||
!is.plainObject(value) && DOM_PROPERTIES_TO_CHECK.every(property => property in value);
|
||||
is.observable = (value) => {
|
||||
if (!value) {
|
||||
return false;
|
||||
}
|
||||
if (value[Symbol.observable] && value === value[Symbol.observable]()) {
|
||||
return true;
|
||||
}
|
||||
if (value['@@observable'] && value === value['@@observable']()) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
is.nodeStream = (value) => !is.nullOrUndefined(value) && isObject(value) && is.function_(value.pipe) && !is.observable(value);
|
||||
is.infinite = (value) => value === Infinity || value === -Infinity;
|
||||
const isAbsoluteMod2 = (rem) => (value) => is.integer(value) && Math.abs(value % 2) === rem;
|
||||
is.even = isAbsoluteMod2(0);
|
||||
is.odd = isAbsoluteMod2(1);
|
||||
const isWhiteSpaceString = (value) => is.string(value) && /\S/.test(value) === false;
|
||||
is.emptyArray = (value) => is.array(value) && value.length === 0;
|
||||
is.nonEmptyArray = (value) => is.array(value) && value.length > 0;
|
||||
is.emptyString = (value) => is.string(value) && value.length === 0;
|
||||
is.nonEmptyString = (value) => is.string(value) && value.length > 0;
|
||||
is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);
|
||||
is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;
|
||||
is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;
|
||||
is.emptySet = (value) => is.set(value) && value.size === 0;
|
||||
is.nonEmptySet = (value) => is.set(value) && value.size > 0;
|
||||
is.emptyMap = (value) => is.map(value) && value.size === 0;
|
||||
is.nonEmptyMap = (value) => is.map(value) && value.size > 0;
|
||||
const predicateOnArray = (method, predicate, values) => {
|
||||
if (is.function_(predicate) === false) {
|
||||
throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
|
||||
}
|
||||
if (values.length === 0) {
|
||||
throw new TypeError('Invalid number of values');
|
||||
}
|
||||
return method.call(values, predicate);
|
||||
};
|
||||
// tslint:disable variable-name
|
||||
is.any = (predicate, ...values) => predicateOnArray(Array.prototype.some, predicate, values);
|
||||
is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);
|
||||
// tslint:enable variable-name
|
||||
})(is || (is = {}));
|
||||
// Some few keywords are reserved, but we'll populate them for Node.js users
|
||||
// See https://github.com/Microsoft/TypeScript/issues/2536
|
||||
Object.defineProperties(is, {
|
||||
class: {
|
||||
value: is.class_
|
||||
},
|
||||
function: {
|
||||
value: is.function_
|
||||
},
|
||||
null: {
|
||||
value: is.null_
|
||||
}
|
||||
});
|
||||
exports.default = is;
|
||||
// For CommonJS default export support
|
||||
module.exports = is;
|
||||
module.exports.default = is;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/@sindresorhus/is/dist/index.js.map
generated
vendored
1
node_modules/@sindresorhus/is/dist/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
63
node_modules/@sindresorhus/is/package.json
generated
vendored
63
node_modules/@sindresorhus/is/package.json
generated
vendored
|
|
@ -1,63 +0,0 @@
|
|||
{
|
||||
"name": "@sindresorhus/is",
|
||||
"version": "0.14.0",
|
||||
"description": "Type check values: `is.string('🦄') //=> true`",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/is",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "tslint --format stylish --project .",
|
||||
"build": "del dist && tsc",
|
||||
"test": "npm run lint && npm run build && ava dist/tests",
|
||||
"prepublish": "npm run build && del dist/tests"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"keywords": [
|
||||
"type",
|
||||
"types",
|
||||
"is",
|
||||
"check",
|
||||
"checking",
|
||||
"validate",
|
||||
"validation",
|
||||
"utility",
|
||||
"util",
|
||||
"typeof",
|
||||
"instanceof",
|
||||
"object",
|
||||
"assert",
|
||||
"assertion",
|
||||
"test",
|
||||
"kind",
|
||||
"primitive",
|
||||
"verify",
|
||||
"compare"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@sindresorhus/tsconfig": "^0.1.0",
|
||||
"@types/jsdom": "^11.12.0",
|
||||
"@types/node": "^10.12.10",
|
||||
"@types/tempy": "^0.2.0",
|
||||
"@types/zen-observable": "^0.8.0",
|
||||
"ava": "^0.25.0",
|
||||
"del-cli": "^1.1.0",
|
||||
"jsdom": "^11.6.2",
|
||||
"rxjs": "^6.3.3",
|
||||
"tempy": "^0.2.1",
|
||||
"tslint": "^5.9.1",
|
||||
"tslint-xo": "^0.10.0",
|
||||
"typescript": "^3.2.1",
|
||||
"zen-observable": "^0.8.8"
|
||||
},
|
||||
"types": "dist/index.d.ts"
|
||||
}
|
||||
451
node_modules/@sindresorhus/is/readme.md
generated
vendored
451
node_modules/@sindresorhus/is/readme.md
generated
vendored
|
|
@ -1,451 +0,0 @@
|
|||
# is [](https://travis-ci.org/sindresorhus/is)
|
||||
|
||||
> Type check values: `is.string('🦄') //=> true`
|
||||
|
||||
<img src="header.gif" width="182" align="right">
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install @sindresorhus/is
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const is = require('@sindresorhus/is');
|
||||
|
||||
is('🦄');
|
||||
//=> 'string'
|
||||
|
||||
is(new Map());
|
||||
//=> 'Map'
|
||||
|
||||
is.number(6);
|
||||
//=> true
|
||||
```
|
||||
|
||||
When using `is` together with TypeScript, [type guards](http://www.typescriptlang.org/docs/handbook/advanced-types.html#type-guards-and-differentiating-types) are being used to infer the correct type inside if-else statements.
|
||||
|
||||
```ts
|
||||
import is from '@sindresorhus/is';
|
||||
|
||||
const padLeft = (value: string, padding: string | number) => {
|
||||
if (is.number(padding)) {
|
||||
// `padding` is typed as `number`
|
||||
return Array(padding + 1).join(' ') + value;
|
||||
}
|
||||
|
||||
if (is.string(padding)) {
|
||||
// `padding` is typed as `string`
|
||||
return padding + value;
|
||||
}
|
||||
|
||||
throw new TypeError(`Expected 'padding' to be of type 'string' or 'number', got '${is(padding)}'.`);
|
||||
}
|
||||
|
||||
padLeft('🦄', 3);
|
||||
//=> ' 🦄'
|
||||
|
||||
padLeft('🦄', '🌈');
|
||||
//=> '🌈🦄'
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### is(value)
|
||||
|
||||
Returns the type of `value`.
|
||||
|
||||
Primitives are lowercase and object types are camelcase.
|
||||
|
||||
Example:
|
||||
|
||||
- `'undefined'`
|
||||
- `'null'`
|
||||
- `'string'`
|
||||
- `'symbol'`
|
||||
- `'Array'`
|
||||
- `'Function'`
|
||||
- `'Object'`
|
||||
|
||||
Note: It will throw an error if you try to feed it object-wrapped primitives, as that's a bad practice. For example `new String('foo')`.
|
||||
|
||||
### is.{method}
|
||||
|
||||
All the below methods accept a value and returns a boolean for whether the value is of the desired type.
|
||||
|
||||
#### Primitives
|
||||
|
||||
##### .undefined(value)
|
||||
##### .null(value)
|
||||
##### .string(value)
|
||||
##### .number(value)
|
||||
##### .boolean(value)
|
||||
##### .symbol(value)
|
||||
|
||||
#### Built-in types
|
||||
|
||||
##### .array(value)
|
||||
##### .function(value)
|
||||
##### .buffer(value)
|
||||
##### .object(value)
|
||||
|
||||
Keep in mind that [functions are objects too](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions).
|
||||
|
||||
##### .numericString(value)
|
||||
|
||||
Returns `true` for a string that represents a number. For example, `'42'` and `'-8'`.
|
||||
|
||||
Note: `'NaN'` returns `false`, but `'Infinity'` and `'-Infinity'` return `true`.
|
||||
|
||||
##### .regExp(value)
|
||||
##### .date(value)
|
||||
##### .error(value)
|
||||
##### .nativePromise(value)
|
||||
##### .promise(value)
|
||||
|
||||
Returns `true` for any object with a `.then()` and `.catch()` method. Prefer this one over `.nativePromise()` as you usually want to allow userland promise implementations too.
|
||||
|
||||
##### .generator(value)
|
||||
|
||||
Returns `true` for any object that implements its own `.next()` and `.throw()` methods and has a function definition for `Symbol.iterator`.
|
||||
|
||||
##### .generatorFunction(value)
|
||||
|
||||
##### .asyncFunction(value)
|
||||
|
||||
Returns `true` for any `async` function that can be called with the `await` operator.
|
||||
|
||||
```js
|
||||
is.asyncFunction(async () => {});
|
||||
// => true
|
||||
|
||||
is.asyncFunction(() => {});
|
||||
// => false
|
||||
```
|
||||
|
||||
##### .boundFunction(value)
|
||||
|
||||
Returns `true` for any `bound` function.
|
||||
|
||||
```js
|
||||
is.boundFunction(() => {});
|
||||
// => true
|
||||
|
||||
is.boundFunction(function () {}.bind(null));
|
||||
// => true
|
||||
|
||||
is.boundFunction(function () {});
|
||||
// => false
|
||||
```
|
||||
|
||||
##### .map(value)
|
||||
##### .set(value)
|
||||
##### .weakMap(value)
|
||||
##### .weakSet(value)
|
||||
|
||||
#### Typed arrays
|
||||
|
||||
##### .int8Array(value)
|
||||
##### .uint8Array(value)
|
||||
##### .uint8ClampedArray(value)
|
||||
##### .int16Array(value)
|
||||
##### .uint16Array(value)
|
||||
##### .int32Array(value)
|
||||
##### .uint32Array(value)
|
||||
##### .float32Array(value)
|
||||
##### .float64Array(value)
|
||||
|
||||
#### Structured data
|
||||
|
||||
##### .arrayBuffer(value)
|
||||
##### .sharedArrayBuffer(value)
|
||||
##### .dataView(value)
|
||||
|
||||
#### Emptiness
|
||||
|
||||
##### .emptyString(value)
|
||||
|
||||
Returns `true` if the value is a `string` and the `.length` is 0.
|
||||
|
||||
##### .nonEmptyString(value)
|
||||
|
||||
Returns `true` if the value is a `string` and the `.length` is more than 0.
|
||||
|
||||
##### .emptyStringOrWhitespace(value)
|
||||
|
||||
Returns `true` if `is.emptyString(value)` or if it's a `string` that is all whitespace.
|
||||
|
||||
##### .emptyArray(value)
|
||||
|
||||
Returns `true` if the value is an `Array` and the `.length` is 0.
|
||||
|
||||
##### .nonEmptyArray(value)
|
||||
|
||||
Returns `true` if the value is an `Array` and the `.length` is more than 0.
|
||||
|
||||
##### .emptyObject(value)
|
||||
|
||||
Returns `true` if the value is an `Object` and `Object.keys(value).length` is 0.
|
||||
|
||||
Please note that `Object.keys` returns only own enumerable properties. Hence something like this can happen:
|
||||
|
||||
```js
|
||||
const object1 = {};
|
||||
|
||||
Object.defineProperty(object1, 'property1', {
|
||||
value: 42,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
is.emptyObject(object1);
|
||||
// => true
|
||||
```
|
||||
|
||||
##### .nonEmptyObject(value)
|
||||
|
||||
Returns `true` if the value is an `Object` and `Object.keys(value).length` is more than 0.
|
||||
|
||||
##### .emptySet(value)
|
||||
|
||||
Returns `true` if the value is a `Set` and the `.size` is 0.
|
||||
|
||||
##### .nonEmptySet(Value)
|
||||
|
||||
Returns `true` if the value is a `Set` and the `.size` is more than 0.
|
||||
|
||||
##### .emptyMap(value)
|
||||
|
||||
Returns `true` if the value is a `Map` and the `.size` is 0.
|
||||
|
||||
##### .nonEmptyMap(value)
|
||||
|
||||
Returns `true` if the value is a `Map` and the `.size` is more than 0.
|
||||
|
||||
#### Miscellaneous
|
||||
|
||||
##### .directInstanceOf(value, class)
|
||||
|
||||
Returns `true` if `value` is a direct instance of `class`.
|
||||
|
||||
```js
|
||||
is.directInstanceOf(new Error(), Error);
|
||||
//=> true
|
||||
|
||||
class UnicornError extends Error {}
|
||||
|
||||
is.directInstanceOf(new UnicornError(), Error);
|
||||
//=> false
|
||||
```
|
||||
|
||||
##### .urlInstance(value)
|
||||
|
||||
Returns `true` if `value` is an instance of the [`URL` class](https://developer.mozilla.org/en-US/docs/Web/API/URL).
|
||||
|
||||
```js
|
||||
const url = new URL('https://example.com');
|
||||
|
||||
is.urlInstance(url);
|
||||
//=> true
|
||||
```
|
||||
|
||||
### .url(value)
|
||||
|
||||
Returns `true` if `value` is a URL string.
|
||||
|
||||
Note: this only does basic checking using the [`URL` class](https://developer.mozilla.org/en-US/docs/Web/API/URL) constructor.
|
||||
|
||||
```js
|
||||
const url = 'https://example.com';
|
||||
|
||||
is.url(url);
|
||||
//=> true
|
||||
|
||||
is.url(new URL(url));
|
||||
//=> false
|
||||
```
|
||||
|
||||
##### .truthy(value)
|
||||
|
||||
Returns `true` for all values that evaluate to true in a boolean context:
|
||||
|
||||
```js
|
||||
is.truthy('🦄');
|
||||
//=> true
|
||||
|
||||
is.truthy(undefined);
|
||||
//=> false
|
||||
```
|
||||
|
||||
##### .falsy(value)
|
||||
|
||||
Returns `true` if `value` is one of: `false`, `0`, `''`, `null`, `undefined`, `NaN`.
|
||||
|
||||
##### .nan(value)
|
||||
##### .nullOrUndefined(value)
|
||||
##### .primitive(value)
|
||||
|
||||
JavaScript primitives are as follows: `null`, `undefined`, `string`, `number`, `boolean`, `symbol`.
|
||||
|
||||
##### .integer(value)
|
||||
|
||||
##### .safeInteger(value)
|
||||
|
||||
Returns `true` if `value` is a [safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger).
|
||||
|
||||
##### .plainObject(value)
|
||||
|
||||
An object is plain if it's created by either `{}`, `new Object()`, or `Object.create(null)`.
|
||||
|
||||
##### .iterable(value)
|
||||
##### .asyncIterable(value)
|
||||
##### .class(value)
|
||||
|
||||
Returns `true` for instances created by a class.
|
||||
|
||||
##### .typedArray(value)
|
||||
|
||||
##### .arrayLike(value)
|
||||
|
||||
A `value` is array-like if it is not a function and has a `value.length` that is a safe integer greater than or equal to 0.
|
||||
|
||||
```js
|
||||
is.arrayLike(document.forms);
|
||||
//=> true
|
||||
|
||||
function foo() {
|
||||
is.arrayLike(arguments);
|
||||
//=> true
|
||||
}
|
||||
foo();
|
||||
```
|
||||
|
||||
##### .inRange(value, range)
|
||||
|
||||
Check if `value` (number) is in the given `range`. The range is an array of two values, lower bound and upper bound, in no specific order.
|
||||
|
||||
```js
|
||||
is.inRange(3, [0, 5]);
|
||||
is.inRange(3, [5, 0]);
|
||||
is.inRange(0, [-2, 2]);
|
||||
```
|
||||
|
||||
##### .inRange(value, upperBound)
|
||||
|
||||
Check if `value` (number) is in the range of `0` to `upperBound`.
|
||||
|
||||
```js
|
||||
is.inRange(3, 10);
|
||||
```
|
||||
|
||||
##### .domElement(value)
|
||||
|
||||
Returns `true` if `value` is a DOM Element.
|
||||
|
||||
##### .nodeStream(value)
|
||||
|
||||
Returns `true` if `value` is a Node.js [stream](https://nodejs.org/api/stream.html).
|
||||
|
||||
```js
|
||||
const fs = require('fs');
|
||||
|
||||
is.nodeStream(fs.createReadStream('unicorn.png'));
|
||||
//=> true
|
||||
```
|
||||
|
||||
##### .observable(value)
|
||||
|
||||
Returns `true` if `value` is an `Observable`.
|
||||
|
||||
```js
|
||||
const {Observable} = require('rxjs');
|
||||
|
||||
is.observable(new Observable());
|
||||
//=> true
|
||||
```
|
||||
|
||||
##### .infinite(value)
|
||||
|
||||
Check if `value` is `Infinity` or `-Infinity`.
|
||||
|
||||
##### .even(value)
|
||||
|
||||
Returns `true` if `value` is an even integer.
|
||||
|
||||
##### .odd(value)
|
||||
|
||||
Returns `true` if `value` is an odd integer.
|
||||
|
||||
##### .any(predicate, ...values)
|
||||
|
||||
Returns `true` if **any** of the input `values` returns true in the `predicate`:
|
||||
|
||||
```js
|
||||
is.any(is.string, {}, true, '🦄');
|
||||
//=> true
|
||||
|
||||
is.any(is.boolean, 'unicorns', [], new Map());
|
||||
//=> false
|
||||
```
|
||||
|
||||
##### .all(predicate, ...values)
|
||||
|
||||
Returns `true` if **all** of the input `values` returns true in the `predicate`:
|
||||
|
||||
```js
|
||||
is.all(is.object, {}, new Map(), new Set());
|
||||
//=> true
|
||||
|
||||
is.all(is.string, '🦄', [], 'unicorns');
|
||||
//=> false
|
||||
```
|
||||
|
||||
|
||||
## FAQ
|
||||
|
||||
### Why yet another type checking module?
|
||||
|
||||
There are hundreds of type checking modules on npm, unfortunately, I couldn't find any that fit my needs:
|
||||
|
||||
- Includes both type methods and ability to get the type
|
||||
- Types of primitives returned as lowercase and object types as camelcase
|
||||
- Covers all built-ins
|
||||
- Unsurprising behavior
|
||||
- Well-maintained
|
||||
- Comprehensive test suite
|
||||
|
||||
For the ones I found, pick 3 of these.
|
||||
|
||||
The most common mistakes I noticed in these modules was using `instanceof` for type checking, forgetting that functions are objects, and omitting `symbol` as a primitive.
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [ow](https://github.com/sindresorhus/ow) - Function argument validation for humans
|
||||
- [is-stream](https://github.com/sindresorhus/is-stream) - Check if something is a Node.js stream
|
||||
- [is-observable](https://github.com/sindresorhus/is-observable) - Check if a value is an Observable
|
||||
- [file-type](https://github.com/sindresorhus/file-type) - Detect the file type of a Buffer/Uint8Array
|
||||
- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address
|
||||
- [is-array-sorted](https://github.com/sindresorhus/is-array-sorted) - Check if an Array is sorted
|
||||
- [is-error-constructor](https://github.com/sindresorhus/is-error-constructor) - Check if a value is an error constructor
|
||||
- [is-empty-iterable](https://github.com/sindresorhus/is-empty-iterable) - Check if an Iterable is empty
|
||||
- [is-blob](https://github.com/sindresorhus/is-blob) - Check if a value is a Blob - File-like object of immutable, raw data
|
||||
- [has-emoji](https://github.com/sindresorhus/has-emoji) - Check whether a string has any emoji
|
||||
|
||||
|
||||
## Created by
|
||||
|
||||
- [Sindre Sorhus](https://github.com/sindresorhus)
|
||||
- [Giora Guttsait](https://github.com/gioragutt)
|
||||
- [Brandon Smith](https://github.com/brandon93s)
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
21
node_modules/@szmarczak/http-timer/LICENSE
generated
vendored
21
node_modules/@szmarczak/http-timer/LICENSE
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2018 Szymon Marczak
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
70
node_modules/@szmarczak/http-timer/README.md
generated
vendored
70
node_modules/@szmarczak/http-timer/README.md
generated
vendored
|
|
@ -1,70 +0,0 @@
|
|||
# http-timer
|
||||
> Timings for HTTP requests
|
||||
|
||||
[](https://travis-ci.org/szmarczak/http-timer)
|
||||
[](https://coveralls.io/github/szmarczak/http-timer?branch=master)
|
||||
[](https://packagephobia.now.sh/result?p=@szmarczak/http-timer)
|
||||
|
||||
Inspired by the [`request` package](https://github.com/request/request).
|
||||
|
||||
## Usage
|
||||
```js
|
||||
'use strict';
|
||||
const https = require('https');
|
||||
const timer = require('@szmarczak/http-timer');
|
||||
|
||||
const request = https.get('https://httpbin.org/anything');
|
||||
const timings = timer(request);
|
||||
|
||||
request.on('response', response => {
|
||||
response.on('data', () => {}); // Consume the data somehow
|
||||
response.on('end', () => {
|
||||
console.log(timings);
|
||||
});
|
||||
});
|
||||
|
||||
// { start: 1535708511443,
|
||||
// socket: 1535708511444,
|
||||
// lookup: 1535708511444,
|
||||
// connect: 1535708511582,
|
||||
// upload: 1535708511887,
|
||||
// response: 1535708512037,
|
||||
// end: 1535708512040,
|
||||
// phases:
|
||||
// { wait: 1,
|
||||
// dns: 0,
|
||||
// tcp: 138,
|
||||
// request: 305,
|
||||
// firstByte: 150,
|
||||
// download: 3,
|
||||
// total: 597 } }
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### timer(request)
|
||||
|
||||
Returns: `Object`
|
||||
|
||||
- `start` - Time when the request started.
|
||||
- `socket` - Time when a socket was assigned to the request.
|
||||
- `lookup` - Time when the DNS lookup finished.
|
||||
- `connect` - Time when the socket successfully connected.
|
||||
- `upload` - Time when the request finished uploading.
|
||||
- `response` - Time when the request fired the `response` event.
|
||||
- `end` - Time when the response fired the `end` event.
|
||||
- `error` - Time when the request fired the `error` event.
|
||||
- `phases`
|
||||
- `wait` - `timings.socket - timings.start`
|
||||
- `dns` - `timings.lookup - timings.socket`
|
||||
- `tcp` - `timings.connect - timings.lookup`
|
||||
- `request` - `timings.upload - timings.connect`
|
||||
- `firstByte` - `timings.response - timings.upload`
|
||||
- `download` - `timings.end - timings.response`
|
||||
- `total` - `timings.end - timings.start` or `timings.error - timings.start`
|
||||
|
||||
**Note**: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
47
node_modules/@szmarczak/http-timer/package.json
generated
vendored
47
node_modules/@szmarczak/http-timer/package.json
generated
vendored
|
|
@ -1,47 +0,0 @@
|
|||
{
|
||||
"name": "@szmarczak/http-timer",
|
||||
"version": "1.1.2",
|
||||
"description": "Timings for HTTP requests",
|
||||
"main": "source",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && nyc ava",
|
||||
"coveralls": "nyc report --reporter=text-lcov | coveralls"
|
||||
},
|
||||
"files": [
|
||||
"source"
|
||||
],
|
||||
"keywords": [
|
||||
"http",
|
||||
"https",
|
||||
"timer",
|
||||
"timings"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/szmarczak/http-timer.git"
|
||||
},
|
||||
"author": "Szymon Marczak",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/szmarczak/http-timer/issues"
|
||||
},
|
||||
"homepage": "https://github.com/szmarczak/http-timer#readme",
|
||||
"xo": {
|
||||
"rules": {
|
||||
"unicorn/filename-case": "camelCase"
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
"ava": "^0.25.0",
|
||||
"coveralls": "^3.0.2",
|
||||
"p-event": "^2.1.0",
|
||||
"nyc": "^12.0.2",
|
||||
"xo": "^0.22.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"defer-to-connect": "^1.0.1"
|
||||
}
|
||||
}
|
||||
99
node_modules/@szmarczak/http-timer/source/index.js
generated
vendored
99
node_modules/@szmarczak/http-timer/source/index.js
generated
vendored
|
|
@ -1,99 +0,0 @@
|
|||
'use strict';
|
||||
const deferToConnect = require('defer-to-connect');
|
||||
|
||||
module.exports = request => {
|
||||
const timings = {
|
||||
start: Date.now(),
|
||||
socket: null,
|
||||
lookup: null,
|
||||
connect: null,
|
||||
upload: null,
|
||||
response: null,
|
||||
end: null,
|
||||
error: null,
|
||||
phases: {
|
||||
wait: null,
|
||||
dns: null,
|
||||
tcp: null,
|
||||
request: null,
|
||||
firstByte: null,
|
||||
download: null,
|
||||
total: null
|
||||
}
|
||||
};
|
||||
|
||||
const handleError = origin => {
|
||||
const emit = origin.emit.bind(origin);
|
||||
origin.emit = (event, ...args) => {
|
||||
// Catches the `error` event
|
||||
if (event === 'error') {
|
||||
timings.error = Date.now();
|
||||
timings.phases.total = timings.error - timings.start;
|
||||
|
||||
origin.emit = emit;
|
||||
}
|
||||
|
||||
// Saves the original behavior
|
||||
return emit(event, ...args);
|
||||
};
|
||||
};
|
||||
|
||||
let uploadFinished = false;
|
||||
const onUpload = () => {
|
||||
timings.upload = Date.now();
|
||||
timings.phases.request = timings.upload - timings.connect;
|
||||
};
|
||||
|
||||
handleError(request);
|
||||
|
||||
request.once('socket', socket => {
|
||||
timings.socket = Date.now();
|
||||
timings.phases.wait = timings.socket - timings.start;
|
||||
|
||||
const lookupListener = () => {
|
||||
timings.lookup = Date.now();
|
||||
timings.phases.dns = timings.lookup - timings.socket;
|
||||
};
|
||||
|
||||
socket.once('lookup', lookupListener);
|
||||
|
||||
deferToConnect(socket, () => {
|
||||
timings.connect = Date.now();
|
||||
|
||||
if (timings.lookup === null) {
|
||||
socket.removeListener('lookup', lookupListener);
|
||||
timings.lookup = timings.connect;
|
||||
timings.phases.dns = timings.lookup - timings.socket;
|
||||
}
|
||||
|
||||
timings.phases.tcp = timings.connect - timings.lookup;
|
||||
|
||||
if (uploadFinished && !timings.upload) {
|
||||
onUpload();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
request.once('finish', () => {
|
||||
uploadFinished = true;
|
||||
|
||||
if (timings.connect) {
|
||||
onUpload();
|
||||
}
|
||||
});
|
||||
|
||||
request.once('response', response => {
|
||||
timings.response = Date.now();
|
||||
timings.phases.firstByte = timings.response - timings.upload;
|
||||
|
||||
handleError(response);
|
||||
|
||||
response.once('end', () => {
|
||||
timings.end = Date.now();
|
||||
timings.phases.download = timings.end - timings.response;
|
||||
timings.phases.total = timings.end - timings.start;
|
||||
});
|
||||
});
|
||||
|
||||
return timings;
|
||||
};
|
||||
21
node_modules/@types/normalize-package-data/LICENSE
generated
vendored
21
node_modules/@types/normalize-package-data/LICENSE
generated
vendored
|
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/normalize-package-data/README.md
generated
vendored
16
node_modules/@types/normalize-package-data/README.md
generated
vendored
|
|
@ -1,16 +0,0 @@
|
|||
# Installation
|
||||
> `npm install --save @types/normalize-package-data`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for normalize-package-data (https://github.com/npm/normalize-package-data#readme).
|
||||
|
||||
# Details
|
||||
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/normalize-package-data
|
||||
|
||||
Additional Details
|
||||
* Last updated: Sun, 07 Jan 2018 07:34:38 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by Jeff Dickey <https://github.com/jdxcode>.
|
||||
46
node_modules/@types/normalize-package-data/index.d.ts
generated
vendored
46
node_modules/@types/normalize-package-data/index.d.ts
generated
vendored
|
|
@ -1,46 +0,0 @@
|
|||
// Type definitions for normalize-package-data 2.4
|
||||
// Project: https://github.com/npm/normalize-package-data#readme
|
||||
// Definitions by: Jeff Dickey <https://github.com/jdxcode>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export = normalize;
|
||||
|
||||
declare function normalize(data: normalize.Input, warn?: normalize.WarnFn, strict?: boolean): void;
|
||||
declare function normalize(data: normalize.Input, strict?: boolean): void;
|
||||
|
||||
declare namespace normalize {
|
||||
type WarnFn = (msg: string) => void;
|
||||
interface Input {[k: string]: any; }
|
||||
|
||||
interface Person {
|
||||
name?: string;
|
||||
email?: string;
|
||||
url?: string;
|
||||
}
|
||||
|
||||
interface Package {
|
||||
[k: string]: any;
|
||||
name: string;
|
||||
version: string;
|
||||
files?: string[];
|
||||
bin?: {[k: string]: string };
|
||||
man?: string[];
|
||||
keywords?: string[];
|
||||
author?: Person;
|
||||
maintainers?: Person[];
|
||||
contributors?: Person[];
|
||||
bundleDependencies?: {[name: string]: string; };
|
||||
dependencies?: {[name: string]: string; };
|
||||
devDependencies?: {[name: string]: string; };
|
||||
optionalDependencies?: {[name: string]: string; };
|
||||
description?: string;
|
||||
engines?: {[type: string]: string };
|
||||
license?: string;
|
||||
repository?: { type: string, url: string };
|
||||
bugs?: { url: string, email?: string } | { url?: string, email: string };
|
||||
homepage?: string;
|
||||
scripts?: {[k: string]: string};
|
||||
readme: string;
|
||||
_id: string;
|
||||
}
|
||||
}
|
||||
22
node_modules/@types/normalize-package-data/package.json
generated
vendored
22
node_modules/@types/normalize-package-data/package.json
generated
vendored
|
|
@ -1,22 +0,0 @@
|
|||
{
|
||||
"name": "@types/normalize-package-data",
|
||||
"version": "2.4.0",
|
||||
"description": "TypeScript definitions for normalize-package-data",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Jeff Dickey",
|
||||
"url": "https://github.com/jdxcode",
|
||||
"githubUsername": "jdxcode"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "5d2101e9e55c73e1d649a6c311e0d40bdfaa25bb06bb75ea6f3bb0d149c1303b",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
58
node_modules/ansi-align/CHANGELOG.md
generated
vendored
58
node_modules/ansi-align/CHANGELOG.md
generated
vendored
|
|
@ -1,58 +0,0 @@
|
|||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
### [3.0.1](https://github.com/nexdrew/ansi-align/compare/v3.0.0...v3.0.1) (2021-09-27)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **package:** update string-width to version 4.1.0 ([#52](https://github.com/nexdrew/ansi-align/issues/52)) ([ab5b733](https://github.com/nexdrew/ansi-align/commit/ab5b733b1c30eef87b75e15459f2216db28d7ed3))
|
||||
|
||||
<a name="3.0.0"></a>
|
||||
# [3.0.0](https://github.com/nexdrew/ansi-align/compare/v2.0.0...v3.0.0) (2018-12-17)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **package:** update string-width to version 3.0.0 ([#50](https://github.com/nexdrew/ansi-align/issues/50)) ([67f0d8f](https://github.com/nexdrew/ansi-align/commit/67f0d8f))
|
||||
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
* **package:** Node 4 no longer supported, please update to Node 6+ or use ansi-align@2.0.0
|
||||
|
||||
|
||||
|
||||
<a name="2.0.0"></a>
|
||||
# [2.0.0](https://github.com/nexdrew/ansi-align/compare/v1.1.0...v2.0.0) (2017-05-01)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* ES2015ify, dropping support for Node <4 ([#30](https://github.com/nexdrew/ansi-align/issues/30)) ([7b43f48](https://github.com/nexdrew/ansi-align/commit/7b43f48))
|
||||
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
* Node 0.10 or 0.12 no longer supported, please update to Node 4+ or use ansi-align@1.1.0
|
||||
|
||||
|
||||
|
||||
<a name="1.1.0"></a>
|
||||
# [1.1.0](https://github.com/nexdrew/ansi-align/compare/v1.0.0...v1.1.0) (2016-06-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* support left-alignment as no-op ([#3](https://github.com/nexdrew/ansi-align/issues/3)) ([e581db6](https://github.com/nexdrew/ansi-align/commit/e581db6))
|
||||
|
||||
|
||||
|
||||
<a name="1.0.0"></a>
|
||||
# 1.0.0 (2016-04-30)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* initial commit ([1914d90](https://github.com/nexdrew/ansi-align/commit/1914d90))
|
||||
13
node_modules/ansi-align/LICENSE
generated
vendored
13
node_modules/ansi-align/LICENSE
generated
vendored
|
|
@ -1,13 +0,0 @@
|
|||
Copyright (c) 2016, Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any purpose
|
||||
with or without fee is hereby granted, provided that the above copyright notice
|
||||
and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
|
||||
THIS SOFTWARE.
|
||||
80
node_modules/ansi-align/README.md
generated
vendored
80
node_modules/ansi-align/README.md
generated
vendored
|
|
@ -1,80 +0,0 @@
|
|||
# ansi-align
|
||||
|
||||
> align-text with ANSI support for CLIs
|
||||
|
||||
[](https://travis-ci.org/nexdrew/ansi-align)
|
||||
[](https://coveralls.io/github/nexdrew/ansi-align?branch=master)
|
||||
[](https://github.com/conventional-changelog/standard-version)
|
||||
[](https://greenkeeper.io/)
|
||||
|
||||
Easily center- or right- align a block of text, carefully ignoring ANSI escape codes.
|
||||
|
||||
E.g. turn this:
|
||||
|
||||
<img width="281" alt="ansi text block no alignment :(" src="https://cloud.githubusercontent.com/assets/1929625/14937509/7c3076dc-0ed7-11e6-8c16-4f6a4ccc8346.png">
|
||||
|
||||
Into this:
|
||||
|
||||
<img width="278" alt="ansi text block center aligned!" src="https://cloud.githubusercontent.com/assets/1929625/14937510/7c3ca0b0-0ed7-11e6-8f0a-541ca39b6e0a.png">
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
npm install --save ansi-align
|
||||
```
|
||||
|
||||
```js
|
||||
var ansiAlign = require('ansi-align')
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### `ansiAlign(text, [opts])`
|
||||
|
||||
Align the given text per the line with the greatest [`string-width`](https://github.com/sindresorhus/string-width), returning a new string (or array).
|
||||
|
||||
#### Arguments
|
||||
|
||||
- `text`: required, string or array
|
||||
|
||||
The text to align. If a string is given, it will be split using either the `opts.split` value or `'\n'` by default. If an array is given, a different array of modified strings will be returned.
|
||||
|
||||
- `opts`: optional, object
|
||||
|
||||
Options to change behavior, see below.
|
||||
|
||||
#### Options
|
||||
|
||||
- `opts.align`: string, default `'center'`
|
||||
|
||||
The alignment mode. Use `'center'` for center-alignment, `'right'` for right-alignment, or `'left'` for left-alignment. Note that the given `text` is assumed to be left-aligned already, so specifying `align: 'left'` just returns the `text` as is (no-op).
|
||||
|
||||
- `opts.split`: string or RegExp, default `'\n'`
|
||||
|
||||
The separator to use when splitting the text. Only used if text is given as a string.
|
||||
|
||||
- `opts.pad`: string, default `' '`
|
||||
|
||||
The value used to left-pad (prepend to) lines of lesser width. Will be repeated as necessary to adjust alignment to the line with the greatest width.
|
||||
|
||||
### `ansiAlign.center(text)`
|
||||
|
||||
Alias for `ansiAlign(text, { align: 'center' })`.
|
||||
|
||||
### `ansiAlign.right(text)`
|
||||
|
||||
Alias for `ansiAlign(text, { align: 'right' })`.
|
||||
|
||||
### `ansiAlign.left(text)`
|
||||
|
||||
Alias for `ansiAlign(text, { align: 'left' })`, which is a no-op.
|
||||
|
||||
## Similar Packages
|
||||
|
||||
- [`center-align`](https://github.com/jonschlinkert/center-align): Very close to this package, except it doesn't support ANSI codes.
|
||||
- [`left-pad`](https://github.com/camwest/left-pad): Great for left-padding but does not support center alignment or ANSI codes.
|
||||
- Pretty much anything by the [chalk](https://github.com/chalk) team
|
||||
|
||||
## License
|
||||
|
||||
ISC © Contributors
|
||||
61
node_modules/ansi-align/index.js
generated
vendored
61
node_modules/ansi-align/index.js
generated
vendored
|
|
@ -1,61 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const stringWidth = require('string-width')
|
||||
|
||||
function ansiAlign (text, opts) {
|
||||
if (!text) return text
|
||||
|
||||
opts = opts || {}
|
||||
const align = opts.align || 'center'
|
||||
|
||||
// short-circuit `align: 'left'` as no-op
|
||||
if (align === 'left') return text
|
||||
|
||||
const split = opts.split || '\n'
|
||||
const pad = opts.pad || ' '
|
||||
const widthDiffFn = align !== 'right' ? halfDiff : fullDiff
|
||||
|
||||
let returnString = false
|
||||
if (!Array.isArray(text)) {
|
||||
returnString = true
|
||||
text = String(text).split(split)
|
||||
}
|
||||
|
||||
let width
|
||||
let maxWidth = 0
|
||||
text = text.map(function (str) {
|
||||
str = String(str)
|
||||
width = stringWidth(str)
|
||||
maxWidth = Math.max(width, maxWidth)
|
||||
return {
|
||||
str,
|
||||
width
|
||||
}
|
||||
}).map(function (obj) {
|
||||
return new Array(widthDiffFn(maxWidth, obj.width) + 1).join(pad) + obj.str
|
||||
})
|
||||
|
||||
return returnString ? text.join(split) : text
|
||||
}
|
||||
|
||||
ansiAlign.left = function left (text) {
|
||||
return ansiAlign(text, { align: 'left' })
|
||||
}
|
||||
|
||||
ansiAlign.center = function center (text) {
|
||||
return ansiAlign(text, { align: 'center' })
|
||||
}
|
||||
|
||||
ansiAlign.right = function right (text) {
|
||||
return ansiAlign(text, { align: 'right' })
|
||||
}
|
||||
|
||||
module.exports = ansiAlign
|
||||
|
||||
function halfDiff (maxWidth, curWidth) {
|
||||
return Math.floor((maxWidth - curWidth) / 2)
|
||||
}
|
||||
|
||||
function fullDiff (maxWidth, curWidth) {
|
||||
return maxWidth - curWidth
|
||||
}
|
||||
43
node_modules/ansi-align/package.json
generated
vendored
43
node_modules/ansi-align/package.json
generated
vendored
|
|
@ -1,43 +0,0 @@
|
|||
{
|
||||
"name": "ansi-align",
|
||||
"version": "3.0.1",
|
||||
"description": "align-text with ANSI support for CLIs",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"pretest": "standard",
|
||||
"test": "nyc ava",
|
||||
"coverage": "nyc report --reporter=text-lcov | coveralls",
|
||||
"release": "standard-version"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/nexdrew/ansi-align.git"
|
||||
},
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"align",
|
||||
"cli",
|
||||
"center",
|
||||
"pad"
|
||||
],
|
||||
"author": "nexdrew",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/nexdrew/ansi-align/issues"
|
||||
},
|
||||
"homepage": "https://github.com/nexdrew/ansi-align#readme",
|
||||
"dependencies": {
|
||||
"string-width": "^4.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ava": "^2.0.0",
|
||||
"chalk": "^2.4.2",
|
||||
"coveralls": "^3.0.3",
|
||||
"nyc": "^14.0.0",
|
||||
"standard": "^14.0.0",
|
||||
"standard-version": "^7.0.0"
|
||||
}
|
||||
}
|
||||
11
node_modules/ava/cli.js
generated
vendored
11
node_modules/ava/cli.js
generated
vendored
|
|
@ -1,11 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
const debug = require('debug')('ava');
|
||||
const importLocal = require('import-local');
|
||||
|
||||
// Prefer the local installation of AVA
|
||||
if (importLocal(__filename)) {
|
||||
debug('Using local install of AVA');
|
||||
} else {
|
||||
require('./lib/cli').run();
|
||||
}
|
||||
4
node_modules/ava/entrypoints/cli.mjs
generated
vendored
Executable file
4
node_modules/ava/entrypoints/cli.mjs
generated
vendored
Executable file
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env node
|
||||
import run from '../lib/cli.js';
|
||||
|
||||
run();
|
||||
109
node_modules/ava/entrypoints/eslint-plugin-helper.cjs
generated
vendored
Normal file
109
node_modules/ava/entrypoints/eslint-plugin-helper.cjs
generated
vendored
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const url = require('url');
|
||||
const v8 = require('v8');
|
||||
const {Worker} = require('worker_threads');
|
||||
|
||||
const {
|
||||
classify,
|
||||
hasExtension,
|
||||
isHelperish,
|
||||
matches,
|
||||
normalizeFileForMatching,
|
||||
normalizePatterns,
|
||||
} = require('../lib/glob-helpers.cjs');
|
||||
|
||||
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
|
||||
|
||||
let data;
|
||||
let sync;
|
||||
let worker;
|
||||
|
||||
const resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (worker === undefined) {
|
||||
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
|
||||
data = new Uint8Array(dataBuffer);
|
||||
|
||||
const syncBuffer = new SharedArrayBuffer(4);
|
||||
sync = new Int32Array(syncBuffer);
|
||||
|
||||
const filename = path.join(__dirname, '../lib/eslint-plugin-helper-worker.js');
|
||||
worker = new Worker(url.pathToFileURL(filename), {
|
||||
workerData: {
|
||||
dataBuffer,
|
||||
syncBuffer,
|
||||
firstMessage: {projectDir, overrideExtensions, overrideFiles},
|
||||
},
|
||||
});
|
||||
worker.unref();
|
||||
} else {
|
||||
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
|
||||
}
|
||||
|
||||
const synchronize = Atomics.wait(sync, 0, 0, 10_000);
|
||||
if (synchronize === 'timed-out') {
|
||||
throw new Error('Timed out resolving AVA configuration');
|
||||
}
|
||||
|
||||
const byteLength = Atomics.exchange(sync, 0, 0);
|
||||
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
|
||||
throw new Error('Globs are over 100 KiB and cannot be resolved');
|
||||
}
|
||||
|
||||
const globsOrError = v8.deserialize(data.slice(0, byteLength));
|
||||
if (globsOrError instanceof Error) {
|
||||
throw globsOrError;
|
||||
}
|
||||
|
||||
return globsOrError;
|
||||
};
|
||||
|
||||
const helperCache = new Map();
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
|
||||
let helperPatterns = [];
|
||||
if (overrides && overrides.helpers !== undefined) {
|
||||
if (!Array.isArray(overrides.helpers) || overrides.helpers.length === 0) {
|
||||
throw new Error('The ’helpers’ override must be an array containing glob patterns.');
|
||||
}
|
||||
|
||||
helperPatterns = normalizePatterns(overrides.helpers);
|
||||
}
|
||||
|
||||
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
|
||||
|
||||
const classifyForESLint = file => {
|
||||
const {isTest} = classify(file, globs);
|
||||
let isHelper = false;
|
||||
if (!isTest && hasExtension(globs.extensions, file)) {
|
||||
file = normalizeFileForMatching(projectDir, file);
|
||||
isHelper = isHelperish(file) || (helperPatterns.length > 0 && matches(file, helperPatterns));
|
||||
}
|
||||
|
||||
return {isHelper, isTest};
|
||||
};
|
||||
|
||||
const helper = Object.freeze({
|
||||
classifyFile: classifyForESLint,
|
||||
classifyImport: importPath => {
|
||||
if (hasExtension(globs.extensions, importPath)) {
|
||||
// The importPath has one of the test file extensions: we can classify
|
||||
// it directly.
|
||||
return classifyForESLint(importPath);
|
||||
}
|
||||
|
||||
// Add the first extension. If multiple extensions are available, assume
|
||||
// patterns are not biased to any particular extension.
|
||||
return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
|
||||
},
|
||||
});
|
||||
helperCache.set(cacheKey, helper);
|
||||
return helper;
|
||||
}
|
||||
|
||||
exports.load = load;
|
||||
2
node_modules/ava/entrypoints/main.cjs
generated
vendored
Normal file
2
node_modules/ava/entrypoints/main.cjs
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
'use strict';
|
||||
module.exports = require('../lib/worker/main.cjs');
|
||||
1
node_modules/ava/entrypoints/main.mjs
generated
vendored
Normal file
1
node_modules/ava/entrypoints/main.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
export {default} from '../lib/worker/main.cjs';
|
||||
2
node_modules/ava/entrypoints/plugin.cjs
generated
vendored
Normal file
2
node_modules/ava/entrypoints/plugin.cjs
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
'use strict';
|
||||
module.exports = require('../lib/worker/plugin.cjs');
|
||||
4
node_modules/ava/entrypoints/plugin.mjs
generated
vendored
Normal file
4
node_modules/ava/entrypoints/plugin.mjs
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
import * as plugin from '../lib/worker/plugin.cjs';
|
||||
|
||||
const {registerSharedWorker} = plugin;
|
||||
export {registerSharedWorker};
|
||||
201
node_modules/ava/eslint-plugin-helper.js
generated
vendored
201
node_modules/ava/eslint-plugin-helper.js
generated
vendored
|
|
@ -1,201 +0,0 @@
|
|||
'use strict';
|
||||
let isMainThread = true;
|
||||
let supportsWorkers = false;
|
||||
try {
|
||||
({isMainThread} = require('worker_threads'));
|
||||
supportsWorkers = true;
|
||||
} catch {}
|
||||
|
||||
const {classify, hasExtension, isHelperish, matches, normalizeFileForMatching, normalizeGlobs, normalizePatterns} = require('./lib/globs');
|
||||
|
||||
let resolveGlobs;
|
||||
let resolveGlobsSync;
|
||||
|
||||
if (!supportsWorkers || !isMainThread) {
|
||||
const normalizeExtensions = require('./lib/extensions');
|
||||
const {loadConfig, loadConfigSync} = require('./lib/load-config');
|
||||
const providerManager = require('./lib/provider-manager');
|
||||
|
||||
const configCache = new Map();
|
||||
|
||||
const collectProviders = ({conf, projectDir}) => {
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
const {level, main} = providerManager.babel(projectDir);
|
||||
providers.push({
|
||||
level,
|
||||
main: main({config: conf.babel}),
|
||||
type: 'babel'
|
||||
});
|
||||
}
|
||||
|
||||
if (Reflect.has(conf, 'typescript')) {
|
||||
const {level, main} = providerManager.typescript(projectDir);
|
||||
providers.push({
|
||||
level,
|
||||
main: main({config: conf.typescript}),
|
||||
type: 'typescript'
|
||||
});
|
||||
}
|
||||
|
||||
return providers;
|
||||
};
|
||||
|
||||
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
|
||||
const extensions = overrideExtensions ?
|
||||
normalizeExtensions(overrideExtensions) :
|
||||
normalizeExtensions(conf.extensions, providers);
|
||||
|
||||
return {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrideFiles ? overrideFiles : conf.files,
|
||||
providers
|
||||
})
|
||||
};
|
||||
};
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
const conf = loadConfigSync({resolveFrom: projectDir});
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
configCache.set(projectDir, {conf, providers});
|
||||
}
|
||||
|
||||
const {conf, providers} = configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
|
||||
resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(conf => { // eslint-disable-line promise/prefer-await-to-then
|
||||
const providers = collectProviders({conf, projectDir});
|
||||
return {conf, providers};
|
||||
}));
|
||||
}
|
||||
|
||||
const {conf, providers} = await configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
}
|
||||
|
||||
if (supportsWorkers) {
|
||||
const v8 = require('v8');
|
||||
|
||||
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
|
||||
|
||||
if (isMainThread) {
|
||||
const {Worker} = require('worker_threads');
|
||||
let data;
|
||||
let sync;
|
||||
let worker;
|
||||
|
||||
resolveGlobsSync = (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (worker === undefined) {
|
||||
const dataBuffer = new SharedArrayBuffer(MAX_DATA_LENGTH_EXCLUSIVE);
|
||||
data = new Uint8Array(dataBuffer);
|
||||
|
||||
const syncBuffer = new SharedArrayBuffer(4);
|
||||
sync = new Int32Array(syncBuffer);
|
||||
|
||||
worker = new Worker(__filename, {
|
||||
workerData: {
|
||||
dataBuffer,
|
||||
syncBuffer,
|
||||
firstMessage: {projectDir, overrideExtensions, overrideFiles}
|
||||
}
|
||||
});
|
||||
worker.unref();
|
||||
} else {
|
||||
worker.postMessage({projectDir, overrideExtensions, overrideFiles});
|
||||
}
|
||||
|
||||
Atomics.wait(sync, 0, 0);
|
||||
|
||||
const byteLength = Atomics.exchange(sync, 0, 0);
|
||||
if (byteLength === MAX_DATA_LENGTH_EXCLUSIVE) {
|
||||
throw new Error('Globs are over 100 KiB and cannot be resolved');
|
||||
}
|
||||
|
||||
const globsOrError = v8.deserialize(data.slice(0, byteLength));
|
||||
if (globsOrError instanceof Error) {
|
||||
throw globsOrError;
|
||||
}
|
||||
|
||||
return globsOrError;
|
||||
};
|
||||
} else {
|
||||
const {parentPort, workerData} = require('worker_threads');
|
||||
const data = new Uint8Array(workerData.dataBuffer);
|
||||
const sync = new Int32Array(workerData.syncBuffer);
|
||||
|
||||
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
|
||||
let encoded;
|
||||
try {
|
||||
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
|
||||
encoded = v8.serialize(globs);
|
||||
} catch (error) {
|
||||
encoded = v8.serialize(error);
|
||||
}
|
||||
|
||||
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
|
||||
Atomics.store(sync, 0, byteLength);
|
||||
Atomics.notify(sync, 0);
|
||||
};
|
||||
|
||||
parentPort.on('message', handleMessage);
|
||||
handleMessage(workerData.firstMessage);
|
||||
delete workerData.firstMessage;
|
||||
}
|
||||
}
|
||||
|
||||
const helperCache = new Map();
|
||||
|
||||
function load(projectDir, overrides) {
|
||||
const cacheKey = `${JSON.stringify(overrides)}\n${projectDir}`;
|
||||
if (helperCache.has(cacheKey)) {
|
||||
return helperCache.get(cacheKey);
|
||||
}
|
||||
|
||||
let helperPatterns = [];
|
||||
if (overrides && overrides.helpers !== undefined) {
|
||||
if (!Array.isArray(overrides.helpers) || overrides.helpers.length === 0) {
|
||||
throw new Error('The ’helpers’ override must be an array containing glob patterns.');
|
||||
}
|
||||
|
||||
helperPatterns = normalizePatterns(overrides.helpers);
|
||||
}
|
||||
|
||||
const globs = resolveGlobsSync(projectDir, overrides && overrides.extensions, overrides && overrides.files);
|
||||
|
||||
const classifyForESLint = file => {
|
||||
const {isTest} = classify(file, globs);
|
||||
let isHelper = false;
|
||||
if (!isTest && hasExtension(globs.extensions, file)) {
|
||||
file = normalizeFileForMatching(projectDir, file);
|
||||
isHelper = isHelperish(file) || (helperPatterns.length > 0 && matches(file, helperPatterns));
|
||||
}
|
||||
|
||||
return {isHelper, isTest};
|
||||
};
|
||||
|
||||
const helper = Object.freeze({
|
||||
classifyFile: classifyForESLint,
|
||||
classifyImport: importPath => {
|
||||
if (hasExtension(globs.extensions, importPath)) {
|
||||
// The importPath has one of the test file extensions: we can classify
|
||||
// it directly.
|
||||
return classifyForESLint(importPath);
|
||||
}
|
||||
|
||||
// Add the first extension. If multiple extensions are available, assume
|
||||
// patterns are not biased to any particular extension.
|
||||
return classifyForESLint(`${importPath}.${globs.extensions[0]}`);
|
||||
}
|
||||
});
|
||||
helperCache.set(cacheKey, helper);
|
||||
return helper;
|
||||
}
|
||||
|
||||
exports.load = load;
|
||||
822
node_modules/ava/index.d.ts
generated
vendored
822
node_modules/ava/index.d.ts
generated
vendored
|
|
@ -1,822 +1,12 @@
|
|||
export interface Subscribable {
|
||||
subscribe(observer: {
|
||||
error(err: any): void;
|
||||
complete(): void;
|
||||
}): void;
|
||||
}
|
||||
import type {TestFn} from './types/test-fn';
|
||||
|
||||
export type Constructor = (new (...args: any[]) => any);
|
||||
|
||||
/** Specify one or more expectations the thrown error must satisfy. */
|
||||
export type ThrowsExpectation = {
|
||||
/** The thrown error must have a code that equals the given string or number. */
|
||||
code?: string | number;
|
||||
|
||||
/** The thrown error must be an instance of this constructor. */
|
||||
instanceOf?: Constructor;
|
||||
|
||||
/** The thrown error must be strictly equal to this value. */
|
||||
is?: Error;
|
||||
|
||||
/** The thrown error must have a message that equals the given string, or matches the regular expression. */
|
||||
message?: string | RegExp;
|
||||
|
||||
/** The thrown error must have a name that equals the given string. */
|
||||
name?: string;
|
||||
};
|
||||
|
||||
export type CommitDiscardOptions = {
|
||||
/**
|
||||
* Whether the logs should be included in those of the parent test.
|
||||
*/
|
||||
retainLogs?: boolean;
|
||||
};
|
||||
|
||||
/** Options that can be passed to the `t.snapshot()` assertion. */
|
||||
export type SnapshotOptions = {
|
||||
/** If provided and not an empty string, used to select the snapshot to compare the `expected` value against. */
|
||||
id?: string;
|
||||
};
|
||||
|
||||
export interface Assertions {
|
||||
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
|
||||
assert: AssertAssertion;
|
||||
|
||||
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
deepEqual: DeepEqualAssertion;
|
||||
|
||||
/** Assert that `actual` is like `expected`. */
|
||||
like: LikeAssertion;
|
||||
|
||||
/** Fail the test. */
|
||||
fail: FailAssertion;
|
||||
|
||||
/** Assert that `actual` is strictly false. */
|
||||
false: FalseAssertion;
|
||||
|
||||
/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
|
||||
falsy: FalsyAssertion;
|
||||
|
||||
/**
|
||||
* Assert that `actual` is [the same
|
||||
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
|
||||
*/
|
||||
is: IsAssertion;
|
||||
|
||||
/**
|
||||
* Assert that `actual` is not [the same
|
||||
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
|
||||
*/
|
||||
not: NotAssertion;
|
||||
|
||||
/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
notDeepEqual: NotDeepEqualAssertion;
|
||||
|
||||
/** Assert that `string` does not match the regular expression. */
|
||||
notRegex: NotRegexAssertion;
|
||||
|
||||
/** Assert that the function does not throw. */
|
||||
notThrows: NotThrowsAssertion;
|
||||
|
||||
/** Assert that the async function does not throw, or that the promise does not reject. Must be awaited. */
|
||||
notThrowsAsync: NotThrowsAsyncAssertion;
|
||||
|
||||
/** Count a passing assertion. */
|
||||
pass: PassAssertion;
|
||||
|
||||
/** Assert that `string` matches the regular expression. */
|
||||
regex: RegexAssertion;
|
||||
|
||||
/**
|
||||
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
|
||||
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
|
||||
* necessary record a new snapshot.
|
||||
*/
|
||||
snapshot: SnapshotAssertion;
|
||||
|
||||
/**
|
||||
* Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
|
||||
*/
|
||||
throws: ThrowsAssertion;
|
||||
|
||||
/**
|
||||
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error), or the promise rejects
|
||||
* with one. If so, returns a promise for the error value, which must be awaited.
|
||||
*/
|
||||
throwsAsync: ThrowsAsyncAssertion;
|
||||
|
||||
/** Assert that `actual` is strictly true. */
|
||||
true: TrueAssertion;
|
||||
|
||||
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
|
||||
truthy: TruthyAssertion;
|
||||
}
|
||||
|
||||
export interface AssertAssertion {
|
||||
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). Comes with power-assert. */
|
||||
(actual: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface DeepEqualAssertion {
|
||||
/** Assert that `actual` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface LikeAssertion {
|
||||
/** Assert that `value` is like `selector`. */
|
||||
(value: any, selector: Record<string, any>, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(value: any, selector: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface FailAssertion {
|
||||
/** Fail the test. */
|
||||
(message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(message?: string): void;
|
||||
}
|
||||
|
||||
export interface FalseAssertion {
|
||||
/** Assert that `actual` is strictly false. */
|
||||
(actual: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface FalsyAssertion {
|
||||
/** Assert that `actual` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy). */
|
||||
(actual: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface IsAssertion {
|
||||
/**
|
||||
* Assert that `actual` is [the same
|
||||
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
|
||||
*/
|
||||
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface NotAssertion {
|
||||
/**
|
||||
* Assert that `actual` is not [the same
|
||||
* value](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is) as `expected`.
|
||||
*/
|
||||
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface NotDeepEqualAssertion {
|
||||
/** Assert that `actual` is not [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to `expected`. */
|
||||
<ValueType = any>(actual: ValueType, expected: ValueType, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, expected: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface NotRegexAssertion {
|
||||
/** Assert that `string` does not match the regular expression. */
|
||||
(string: string, regex: RegExp, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(string: string, regex: RegExp, message?: string): void;
|
||||
}
|
||||
|
||||
export interface NotThrowsAssertion {
|
||||
/** Assert that the function does not throw. */
|
||||
(fn: () => any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(fn: () => any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface NotThrowsAsyncAssertion {
|
||||
/** Assert that the async function does not throw. You must await the result. */
|
||||
(fn: () => PromiseLike<any>, message?: string): Promise<void>;
|
||||
|
||||
/** Assert that the promise does not reject. You must await the result. */
|
||||
(promise: PromiseLike<any>, message?: string): Promise<void>;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(nonThrower: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface PassAssertion {
|
||||
/** Count a passing assertion. */
|
||||
(message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(message?: string): void;
|
||||
}
|
||||
|
||||
export interface RegexAssertion {
|
||||
/** Assert that `string` matches the regular expression. */
|
||||
(string: string, regex: RegExp, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(string: string, regex: RegExp, message?: string): void;
|
||||
}
|
||||
|
||||
export interface SnapshotAssertion {
|
||||
/**
|
||||
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
|
||||
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details), or if
|
||||
* necessary record a new snapshot.
|
||||
*/
|
||||
(expected: any, message?: string): void;
|
||||
|
||||
/**
|
||||
* Assert that `expected` is [deeply equal](https://github.com/concordancejs/concordance#comparison-details) to a
|
||||
* previously recorded [snapshot](https://github.com/concordancejs/concordance#serialization-details) (selected
|
||||
* through `options.id` if provided), or if necessary record a new snapshot.
|
||||
*/
|
||||
(expected: any, options: SnapshotOptions, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(expected: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(expected: any, options: SnapshotOptions, message?: string): void;
|
||||
}
|
||||
|
||||
export interface ThrowsAssertion {
|
||||
/**
|
||||
* Assert that the function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error value.
|
||||
* The error must satisfy all expectations.
|
||||
*/
|
||||
<ThrownError extends Error>(fn: () => any, expectations?: ThrowsExpectation | null, message?: string): ThrownError;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(fn: () => any, expectations?: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface ThrowsAsyncAssertion {
|
||||
/**
|
||||
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
|
||||
* value. You must await the result.
|
||||
*/
|
||||
<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
|
||||
|
||||
/**
|
||||
* Assert that the async function throws [an error](https://www.npmjs.com/package/is-error). If so, returns the error
|
||||
* value. You must await the result. The error must satisfy all expectations.
|
||||
*/
|
||||
<ThrownError extends Error>(fn: () => PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
|
||||
|
||||
/**
|
||||
* Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
|
||||
* rejection reason. You must await the result.
|
||||
*/
|
||||
<ThrownError extends Error>(promise: PromiseLike<any>, expectations?: null, message?: string): Promise<ThrownError>;
|
||||
|
||||
/**
|
||||
* Assert that the promise rejects with [an error](https://www.npmjs.com/package/is-error). If so, returns the
|
||||
* rejection reason. You must await the result. The error must satisfy all expectations.
|
||||
*/
|
||||
<ThrownError extends Error>(promise: PromiseLike<any>, expectations: ThrowsExpectation, message?: string): Promise<ThrownError>;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(thrower: any, expectations?: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface TrueAssertion {
|
||||
/** Assert that `actual` is strictly true. */
|
||||
(actual: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, message?: string): void;
|
||||
}
|
||||
|
||||
export interface TruthyAssertion {
|
||||
/** Assert that `actual` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy). */
|
||||
(actual: any, message?: string): void;
|
||||
|
||||
/** Skip this assertion. */
|
||||
skip(actual: any, message?: string): void;
|
||||
}
|
||||
|
||||
/** The `t` value passed to test & hook implementations. */
|
||||
export interface ExecutionContext<Context = unknown> extends Assertions {
|
||||
/** Test context, shared with hooks. */
|
||||
context: Context;
|
||||
|
||||
/** Title of the test or hook. */
|
||||
readonly title: string;
|
||||
|
||||
/** Whether the test has passed. Only accurate in afterEach hooks. */
|
||||
readonly passed: boolean;
|
||||
|
||||
log: LogFn;
|
||||
plan: PlanFn;
|
||||
teardown: TeardownFn;
|
||||
timeout: TimeoutFn;
|
||||
try: TryFn<Context>;
|
||||
}
|
||||
|
||||
export interface LogFn {
|
||||
/** Log one or more values. */
|
||||
(...values: any[]): void;
|
||||
|
||||
/** Skip logging. */
|
||||
skip(...values: any[]): void;
|
||||
}
|
||||
|
||||
export interface PlanFn {
|
||||
/**
|
||||
* Plan how many assertion there are in the test. The test will fail if the actual assertion count doesn't match the
|
||||
* number of planned assertions. See [assertion planning](https://github.com/avajs/ava#assertion-planning).
|
||||
*/
|
||||
(count: number): void;
|
||||
|
||||
/** Don't plan assertions. */
|
||||
skip(count: number): void;
|
||||
}
|
||||
|
||||
export interface TimeoutFn {
|
||||
/**
|
||||
* Set a timeout for the test, in milliseconds. The test will fail if the timeout is exceeded.
|
||||
* The timeout is reset each time an assertion is made.
|
||||
*/
|
||||
(ms: number, message?: string): void;
|
||||
}
|
||||
|
||||
export interface TeardownFn {
|
||||
/** Declare a function to be run after the test has ended. */
|
||||
(fn: () => void): void;
|
||||
}
|
||||
|
||||
export interface TryFn<Context = unknown> {
|
||||
/**
|
||||
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
|
||||
* the test will fail. A macro may be provided. The title may help distinguish attempts from
|
||||
* one another.
|
||||
*/
|
||||
<Args extends any[]>(title: string, fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
|
||||
|
||||
/**
|
||||
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
|
||||
* the test will fail. A macro may be provided. The title may help distinguish attempts from
|
||||
* one another.
|
||||
*/
|
||||
<Args extends any[]>(title: string, fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
|
||||
|
||||
/**
|
||||
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
|
||||
* the test will fail. A macro may be provided.
|
||||
*/
|
||||
<Args extends any[]>(fn: EitherMacro<Args, Context>, ...args: Args): Promise<TryResult>;
|
||||
|
||||
/**
|
||||
* Attempt to run some assertions. The result must be explicitly committed or discarded or else
|
||||
* the test will fail. A macro may be provided.
|
||||
*/
|
||||
<Args extends any[]>(fn: [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>], ...args: Args): Promise<TryResult[]>;
|
||||
}
|
||||
|
||||
export interface AssertionError extends Error {}
|
||||
|
||||
export interface TryResult {
|
||||
/**
|
||||
* Title of the attempt, helping you tell attempts aparts.
|
||||
*/
|
||||
title: string;
|
||||
|
||||
/**
|
||||
* Indicates whether all assertions passed, or at least one failed.
|
||||
*/
|
||||
passed: boolean;
|
||||
|
||||
/**
|
||||
* Errors raised for each failed assertion.
|
||||
*/
|
||||
errors: AssertionError[];
|
||||
|
||||
/**
|
||||
* Logs created during the attempt using `t.log()`. Contains formatted values.
|
||||
*/
|
||||
logs: string[];
|
||||
|
||||
/**
|
||||
* Commit the attempt. Counts as one assertion for the plan count. If the
|
||||
* attempt failed, calling this will also cause your test to fail.
|
||||
*/
|
||||
commit(options?: CommitDiscardOptions): void;
|
||||
|
||||
/**
|
||||
* Discard the attempt.
|
||||
*/
|
||||
discard(options?: CommitDiscardOptions): void;
|
||||
}
|
||||
|
||||
/** The `t` value passed to implementations for tests & hooks declared with the `.cb` modifier. */
|
||||
export interface CbExecutionContext<Context = unknown> extends ExecutionContext<Context> {
|
||||
/**
|
||||
* End the test. If `error` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy) the test or hook
|
||||
* will fail.
|
||||
*/
|
||||
end(error?: any): void;
|
||||
}
|
||||
|
||||
export type ImplementationResult = PromiseLike<void> | Subscribable | void;
|
||||
export type Implementation<Context = unknown> = (t: ExecutionContext<Context>) => ImplementationResult;
|
||||
export type CbImplementation<Context = unknown> = (t: CbExecutionContext<Context>) => ImplementationResult;
|
||||
|
||||
/** A reusable test or hook implementation. */
|
||||
export type UntitledMacro<Args extends any[], Context = unknown> = (t: ExecutionContext<Context>, ...args: Args) => ImplementationResult;
|
||||
|
||||
/** A reusable test or hook implementation. */
|
||||
export type Macro<Args extends any[], Context = unknown> = UntitledMacro<Args, Context> & {
|
||||
/**
|
||||
* Implement this function to generate a test (or hook) title whenever this macro is used. `providedTitle` contains
|
||||
* the title provided when the test or hook was declared. Also receives the remaining test arguments.
|
||||
*/
|
||||
title?: (providedTitle: string | undefined, ...args: Args) => string;
|
||||
};
|
||||
|
||||
export type EitherMacro<Args extends any[], Context> = Macro<Args, Context> | UntitledMacro<Args, Context>;
|
||||
|
||||
/** Alias for a single macro, or an array of macros. */
|
||||
export type OneOrMoreMacros<Args extends any[], Context> = EitherMacro<Args, Context> | [EitherMacro<Args, Context>, ...Array<EitherMacro<Args, Context>>];
|
||||
|
||||
/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
|
||||
export type UntitledCbMacro<Args extends any[], Context = unknown> = (t: CbExecutionContext<Context>, ...args: Args) => ImplementationResult;
|
||||
|
||||
/** A reusable test or hook implementation, for tests & hooks declared with the `.cb` modifier. */
|
||||
export type CbMacro<Args extends any[], Context = unknown> = UntitledCbMacro<Args, Context> & {
|
||||
title?: (providedTitle: string | undefined, ...args: Args) => string;
|
||||
};
|
||||
|
||||
export type EitherCbMacro<Args extends any[], Context> = CbMacro<Args, Context> | UntitledCbMacro<Args, Context>;
|
||||
|
||||
/** Alias for a single macro, or an array of macros, used for tests & hooks declared with the `.cb` modifier. */
|
||||
export type OneOrMoreCbMacros<Args extends any[], Context> = EitherCbMacro<Args, Context> | [EitherCbMacro<Args, Context>, ...Array<EitherCbMacro<Args, Context>>];
|
||||
|
||||
export interface TestInterface<Context = unknown> {
|
||||
/** Declare a concurrent test. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests have passed. */
|
||||
after: AfterInterface<Context>;
|
||||
|
||||
/** Declare a hook that is run after each passing test. */
|
||||
afterEach: AfterInterface<Context>;
|
||||
|
||||
/** Declare a hook that is run once, before all tests. */
|
||||
before: BeforeInterface<Context>;
|
||||
|
||||
/** Declare a hook that is run before each test. */
|
||||
beforeEach: BeforeInterface<Context>;
|
||||
|
||||
/** Declare a test that must call `t.end()` when it's done. */
|
||||
cb: CbInterface<Context>;
|
||||
|
||||
/** Declare a test that is expected to fail. */
|
||||
failing: FailingInterface<Context>;
|
||||
|
||||
/** Declare tests and hooks that are run serially. */
|
||||
serial: SerialInterface<Context>;
|
||||
|
||||
only: OnlyInterface<Context>;
|
||||
skip: SkipInterface<Context>;
|
||||
todo: TodoDeclaration;
|
||||
meta: MetaInterface;
|
||||
}
|
||||
|
||||
export interface AfterInterface<Context = unknown> {
|
||||
/** Declare a hook that is run once, after all tests have passed. */
|
||||
(implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests have passed. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests have passed. Additional arguments are passed to the macro. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests have passed. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests are done. */
|
||||
always: AlwaysInterface<Context>;
|
||||
|
||||
/** Declare a hook that must call `t.end()` when it's done. */
|
||||
cb: HookCbInterface<Context>;
|
||||
|
||||
skip: HookSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface AlwaysInterface<Context = unknown> {
|
||||
/** Declare a hook that is run once, after all tests are done. */
|
||||
(implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests are done. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests are done. Additional arguments are passed to the macro. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that is run once, after all tests are done. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that must call `t.end()` when it's done. */
|
||||
cb: HookCbInterface<Context>;
|
||||
|
||||
skip: HookSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface BeforeInterface<Context = unknown> {
|
||||
/** Declare a hook that is run once, before all tests. */
|
||||
(implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, before all tests. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a hook that is run once, before all tests. Additional arguments are passed to the macro. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that is run once, before all tests. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a hook that must call `t.end()` when it's done. */
|
||||
cb: HookCbInterface<Context>;
|
||||
|
||||
skip: HookSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface CbInterface<Context = unknown> {
|
||||
/** Declare a test that must call `t.end()` when it's done. */
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* Additional arguments are passed to the macro.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a concurrent test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* The macro is responsible for generating a unique test title.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a test that is expected to fail. */
|
||||
failing: CbFailingInterface<Context>;
|
||||
|
||||
only: CbOnlyInterface<Context>;
|
||||
skip: CbSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface CbFailingInterface<Context = unknown> {
|
||||
/** Declare a test that must call `t.end()` when it's done. The test is expected to fail. */
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* Additional arguments are passed to the macro. The test is expected to fail.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* The test is expected to fail.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
only: CbOnlyInterface<Context>;
|
||||
skip: CbSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface CbOnlyInterface<Context = unknown> {
|
||||
/**
|
||||
* Declare a test that must call `t.end()` when it's done. Only this test and others declared with `.only()` are run.
|
||||
*/
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* Additional arguments are passed to the macro. Only this test and others declared with `.only()` are run.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* Additional arguments are passed to the macro. Only this test and others declared with `.only()` are run.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface CbSkipInterface<Context = unknown> {
|
||||
/** Skip this test. */
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/** Skip this test. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Skip this test. */
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface FailingInterface<Context = unknown> {
|
||||
/** Declare a concurrent test. The test is expected to fail. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a concurrent test that uses one or more macros. Additional arguments are passed to the macro.
|
||||
* The test is expected to fail.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a concurrent test that uses one or more macros. The macro is responsible for generating a unique test title.
|
||||
* The test is expected to fail.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
only: OnlyInterface<Context>;
|
||||
skip: SkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface HookCbInterface<Context = unknown> {
|
||||
/** Declare a hook that must call `t.end()` when it's done. */
|
||||
(implementation: CbImplementation<Context>): void;
|
||||
|
||||
/** Declare a hook that must call `t.end()` when it's done. */
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
* Additional arguments are passed to the macro.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a hook that uses one or more macros. The macros must call `t.end()` when they're done.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
skip: HookCbSkipInterface<Context>;
|
||||
}
|
||||
|
||||
export interface HookCbSkipInterface<Context = unknown> {
|
||||
/** Skip this hook. */
|
||||
(implementation: CbImplementation<Context>): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
(title: string, implementation: CbImplementation<Context>): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
<T extends any[]>(macros: OneOrMoreCbMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface HookSkipInterface<Context = unknown> {
|
||||
/** Skip this hook. */
|
||||
(implementation: Implementation<Context>): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Skip this hook. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface OnlyInterface<Context = unknown> {
|
||||
/** Declare a test. Only this test and others declared with `.only()` are run. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. Additional arguments are passed to the macro.
|
||||
* Only this test and others declared with `.only()` are run.
|
||||
*/
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a test that uses one or more macros. The macro is responsible for generating a unique test title.
|
||||
* Only this test and others declared with `.only()` are run.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface SerialInterface<Context = unknown> {
|
||||
/** Declare a serial test. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Declare a serial test that uses one or more macros. Additional arguments are passed to the macro. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/**
|
||||
* Declare a serial test that uses one or more macros. The macro is responsible for generating a unique test title.
|
||||
*/
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Declare a serial hook that is run once, after all tests have passed. */
|
||||
after: AfterInterface<Context>;
|
||||
|
||||
/** Declare a serial hook that is run after each passing test. */
|
||||
afterEach: AfterInterface<Context>;
|
||||
|
||||
/** Declare a serial hook that is run once, before all tests. */
|
||||
before: BeforeInterface<Context>;
|
||||
|
||||
/** Declare a serial hook that is run before each test. */
|
||||
beforeEach: BeforeInterface<Context>;
|
||||
|
||||
/** Declare a serial test that must call `t.end()` when it's done. */
|
||||
cb: CbInterface<Context>;
|
||||
|
||||
/** Declare a serial test that is expected to fail. */
|
||||
failing: FailingInterface<Context>;
|
||||
|
||||
only: OnlyInterface<Context>;
|
||||
skip: SkipInterface<Context>;
|
||||
todo: TodoDeclaration;
|
||||
}
|
||||
|
||||
export interface SkipInterface<Context = unknown> {
|
||||
/** Skip this test. */
|
||||
(title: string, implementation: Implementation<Context>): void;
|
||||
|
||||
/** Skip this test. */
|
||||
<T extends any[]>(title: string, macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
|
||||
/** Skip this test. */
|
||||
<T extends any[]>(macros: OneOrMoreMacros<T, Context>, ...rest: T): void;
|
||||
}
|
||||
|
||||
export interface TodoDeclaration {
|
||||
/** Declare a test that should be implemented later. */
|
||||
(title: string): void;
|
||||
}
|
||||
|
||||
export interface MetaInterface {
|
||||
/** Path to the test file being executed. */
|
||||
file: string;
|
||||
|
||||
/** Directory where snapshots are stored. */
|
||||
snapshotDirectory: string;
|
||||
}
|
||||
export * from './types/assertions';
|
||||
export * from './types/try-fn';
|
||||
export * from './types/test-fn';
|
||||
export * from './types/subscribable';
|
||||
|
||||
/** Call to declare a test, or chain to declare hooks or test modifiers */
|
||||
declare const test: TestInterface;
|
||||
declare const test: TestFn;
|
||||
|
||||
/** Call to declare a test, or chain to declare hooks or test modifiers */
|
||||
export default test;
|
||||
|
||||
/** Call to declare a hook that is run once, after all tests have passed, or chain to declare modifiers. */
|
||||
export const after: AfterInterface;
|
||||
|
||||
/** Call to declare a hook that is run after each passing test, or chain to declare modifiers. */
|
||||
export const afterEach: AfterInterface;
|
||||
|
||||
/** Call to declare a hook that is run once, before all tests, or chain to declare modifiers. */
|
||||
export const before: BeforeInterface;
|
||||
|
||||
/** Call to declare a hook that is run before each test, or chain to declare modifiers. */
|
||||
export const beforeEach: BeforeInterface;
|
||||
|
||||
/** Call to declare a test that must invoke `t.end()` when it's done, or chain to declare modifiers. */
|
||||
export const cb: CbInterface;
|
||||
|
||||
/** Call to declare a test that is expected to fail, or chain to declare modifiers. */
|
||||
export const failing: FailingInterface;
|
||||
|
||||
/** Call to declare a test that is run exclusively, along with other tests declared with `.only()`. */
|
||||
export const only: OnlyInterface;
|
||||
|
||||
/** Call to declare a serial test, or chain to declare serial hooks or test modifiers. */
|
||||
export const serial: SerialInterface;
|
||||
|
||||
/** Skip this test. */
|
||||
export const skip: SkipInterface;
|
||||
|
||||
/** Declare a test that should be implemented later. */
|
||||
export const todo: TodoDeclaration;
|
||||
|
||||
/** Meta data associated with the current process. */
|
||||
export const meta: MetaInterface;
|
||||
|
|
|
|||
8
node_modules/ava/index.js
generated
vendored
8
node_modules/ava/index.js
generated
vendored
|
|
@ -1,8 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
// Ensure the same AVA install is loaded by the test file as by the test worker
|
||||
if (process.env.AVA_PATH && process.env.AVA_PATH !== __dirname) {
|
||||
module.exports = require(process.env.AVA_PATH);
|
||||
} else {
|
||||
module.exports = require('./lib/worker/main');
|
||||
}
|
||||
157
node_modules/ava/lib/api.js
generated
vendored
157
node_modules/ava/lib/api.js
generated
vendored
|
|
@ -1,23 +1,25 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const commonPathPrefix = require('common-path-prefix');
|
||||
const resolveCwd = require('resolve-cwd');
|
||||
const debounce = require('lodash/debounce');
|
||||
const arrify = require('arrify');
|
||||
const ms = require('ms');
|
||||
const chunkd = require('chunkd');
|
||||
const Emittery = require('emittery');
|
||||
const pMap = require('p-map');
|
||||
const tempDir = require('temp-dir');
|
||||
const globs = require('./globs');
|
||||
const isCi = require('./is-ci');
|
||||
const RunStatus = require('./run-status');
|
||||
const fork = require('./fork');
|
||||
const serializeError = require('./serialize-error');
|
||||
const {getApplicableLineNumbers} = require('./line-numbers');
|
||||
const sharedWorkers = require('./plugin-support/shared-workers');
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import process from 'node:process';
|
||||
|
||||
import arrify from 'arrify';
|
||||
import chunkd from 'chunkd';
|
||||
import commonPathPrefix from 'common-path-prefix';
|
||||
import Emittery from 'emittery';
|
||||
import ms from 'ms';
|
||||
import pMap from 'p-map';
|
||||
import resolveCwd from 'resolve-cwd';
|
||||
import tempDir from 'temp-dir';
|
||||
|
||||
import fork from './fork.js';
|
||||
import * as globs from './globs.js';
|
||||
import isCi from './is-ci.js';
|
||||
import {getApplicableLineNumbers} from './line-numbers.js';
|
||||
import {observeWorkerProcess} from './plugin-support/shared-workers.js';
|
||||
import RunStatus from './run-status.js';
|
||||
import scheduler from './scheduler.js';
|
||||
import serializeError from './serialize-error.js';
|
||||
|
||||
function resolveModules(modules) {
|
||||
return arrify(modules).map(name => {
|
||||
|
|
@ -40,7 +42,40 @@ function getFilePathPrefix(files) {
|
|||
return commonPathPrefix(files);
|
||||
}
|
||||
|
||||
class Api extends Emittery {
|
||||
class TimeoutTrigger {
|
||||
constructor(fn, waitMs = 0) {
|
||||
this.fn = fn.bind(null);
|
||||
this.ignoreUntil = 0;
|
||||
this.waitMs = waitMs;
|
||||
this.timer = undefined;
|
||||
}
|
||||
|
||||
debounce() {
|
||||
if (this.timer === undefined) {
|
||||
this.timer = setTimeout(() => this.trigger(), this.waitMs);
|
||||
} else {
|
||||
this.timer.refresh();
|
||||
}
|
||||
}
|
||||
|
||||
discard() {
|
||||
// N.B. this.timer is not cleared so if debounce() is called after it will
|
||||
// not run again.
|
||||
clearTimeout(this.timer);
|
||||
}
|
||||
|
||||
ignoreFor(periodMs) {
|
||||
this.ignoreUntil = Math.max(this.ignoreUntil, Date.now() + periodMs);
|
||||
}
|
||||
|
||||
trigger() {
|
||||
if (Date.now() >= this.ignoreUntil) {
|
||||
this.fn();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default class Api extends Emittery {
|
||||
constructor(options) {
|
||||
super();
|
||||
|
||||
|
|
@ -55,7 +90,7 @@ class Api extends Emittery {
|
|||
}
|
||||
}
|
||||
|
||||
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) {
|
||||
async run({files: selectedFiles = [], filter = [], runtimeOptions = {}} = {}) { // eslint-disable-line complexity
|
||||
let setupOrGlobError;
|
||||
|
||||
const apiOptions = this.options;
|
||||
|
|
@ -70,11 +105,11 @@ class Api extends Emittery {
|
|||
let bailed = false;
|
||||
const pendingWorkers = new Set();
|
||||
const timedOutWorkerFiles = new Set();
|
||||
let restartTimer;
|
||||
let timeoutTrigger;
|
||||
if (apiOptions.timeout && !apiOptions.debug) {
|
||||
const timeout = ms(apiOptions.timeout);
|
||||
|
||||
restartTimer = debounce(() => {
|
||||
timeoutTrigger = new TimeoutTrigger(() => {
|
||||
// If failFast is active, prevent new test files from running after
|
||||
// the current ones are exited.
|
||||
if (failFast) {
|
||||
|
|
@ -89,7 +124,7 @@ class Api extends Emittery {
|
|||
}
|
||||
}, timeout);
|
||||
} else {
|
||||
restartTimer = Object.assign(() => {}, {cancel() {}});
|
||||
timeoutTrigger = new TimeoutTrigger(() => {});
|
||||
}
|
||||
|
||||
this._interruptHandler = () => {
|
||||
|
|
@ -102,7 +137,7 @@ class Api extends Emittery {
|
|||
bailed = true;
|
||||
|
||||
// Make sure we don't run the timeout handler
|
||||
restartTimer.cancel();
|
||||
timeoutTrigger.discard();
|
||||
|
||||
runStatus.emitStateChange({type: 'interrupt'});
|
||||
|
||||
|
|
@ -111,6 +146,8 @@ class Api extends Emittery {
|
|||
}
|
||||
};
|
||||
|
||||
const {providers = []} = this.options;
|
||||
|
||||
let testFiles;
|
||||
try {
|
||||
testFiles = await globs.findTests({cwd: this.options.projectDir, ...apiOptions.globs});
|
||||
|
|
@ -118,7 +155,8 @@ class Api extends Emittery {
|
|||
selectedFiles = filter.length === 0 ? testFiles : globs.applyTestFileFilter({
|
||||
cwd: this.options.projectDir,
|
||||
filter: filter.map(({pattern}) => pattern),
|
||||
testFiles
|
||||
providers,
|
||||
testFiles,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
@ -126,6 +164,13 @@ class Api extends Emittery {
|
|||
setupOrGlobError = error;
|
||||
}
|
||||
|
||||
const selectionInsights = {
|
||||
filter,
|
||||
ignoredFilterPatternFiles: selectedFiles.ignoredFilterPatternFiles || [],
|
||||
testFileCount: testFiles.length,
|
||||
selectionCount: selectedFiles.length,
|
||||
};
|
||||
|
||||
try {
|
||||
if (this.options.parallelRuns) {
|
||||
const {currentIndex, totalRuns} = this.options.parallelRuns;
|
||||
|
|
@ -137,11 +182,13 @@ class Api extends Emittery {
|
|||
|
||||
const currentFileCount = selectedFiles.length;
|
||||
|
||||
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns});
|
||||
runStatus = new RunStatus(fileCount, {currentFileCount, currentIndex, totalRuns}, selectionInsights);
|
||||
} else {
|
||||
runStatus = new RunStatus(selectedFiles.length, null);
|
||||
runStatus = new RunStatus(selectedFiles.length, null, selectionInsights);
|
||||
}
|
||||
|
||||
selectedFiles = scheduler.failingTestsFirst(selectedFiles, this._getLocalCacheDir(), this.options.cacheEnabled);
|
||||
|
||||
const debugWithoutSpecificFile = Boolean(this.options.debug) && !this.options.debug.active && selectedFiles.length !== 1;
|
||||
|
||||
await this.emit('run', {
|
||||
|
|
@ -155,7 +202,7 @@ class Api extends Emittery {
|
|||
previousFailures: runtimeOptions.previousFailures || 0,
|
||||
runOnlyExclusive: runtimeOptions.runOnlyExclusive === true,
|
||||
runVector: runtimeOptions.runVector || 0,
|
||||
status: runStatus
|
||||
status: runStatus,
|
||||
});
|
||||
|
||||
if (setupOrGlobError) {
|
||||
|
|
@ -169,9 +216,9 @@ class Api extends Emittery {
|
|||
|
||||
runStatus.on('stateChange', record => {
|
||||
if (record.testFile && !timedOutWorkerFiles.has(record.testFile)) {
|
||||
// Restart the timer whenever there is activity from workers that
|
||||
// Debounce the timer whenever there is activity from workers that
|
||||
// haven't already timed out.
|
||||
restartTimer();
|
||||
timeoutTrigger.debounce();
|
||||
}
|
||||
|
||||
if (failFast && (record.type === 'hook-failed' || record.type === 'test-failed' || record.type === 'worker-failed')) {
|
||||
|
|
@ -185,14 +232,16 @@ class Api extends Emittery {
|
|||
}
|
||||
});
|
||||
|
||||
const {providers = []} = this.options;
|
||||
const providerStates = (await Promise.all(providers.map(async ({type, main}) => {
|
||||
const providerStates = [];
|
||||
await Promise.all(providers.map(async ({type, main}) => {
|
||||
const state = await main.compile({cacheDir: this._createCacheDir(), files: testFiles});
|
||||
return state === null ? null : {type, state};
|
||||
}))).filter(state => state !== null);
|
||||
if (state !== null) {
|
||||
providerStates.push({type, state});
|
||||
}
|
||||
}));
|
||||
|
||||
// Resolve the correct concurrency value.
|
||||
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Infinity);
|
||||
let concurrency = Math.min(os.cpus().length, isCi ? 2 : Number.POSITIVE_INFINITY);
|
||||
if (apiOptions.concurrency > 0) {
|
||||
concurrency = apiOptions.concurrency;
|
||||
}
|
||||
|
|
@ -212,13 +261,15 @@ class Api extends Emittery {
|
|||
}
|
||||
|
||||
const lineNumbers = getApplicableLineNumbers(globs.normalizeFileForMatching(apiOptions.projectDir, file), filter);
|
||||
// Removing `providers` field because they cannot be transfered to the worker threads.
|
||||
const {providers, ...forkOptions} = apiOptions;
|
||||
const options = {
|
||||
...apiOptions,
|
||||
...forkOptions,
|
||||
providerStates,
|
||||
lineNumbers,
|
||||
recordNewSnapshots: !isCi,
|
||||
// If we're looking for matches, run every single test process in exclusive-only mode
|
||||
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true
|
||||
runOnlyExclusive: apiOptions.match.length > 0 || runtimeOptions.runOnlyExclusive === true,
|
||||
};
|
||||
|
||||
if (runtimeOptions.updateSnapshots) {
|
||||
|
|
@ -227,42 +278,52 @@ class Api extends Emittery {
|
|||
}
|
||||
|
||||
const worker = fork(file, options, apiOptions.nodeArguments);
|
||||
worker.onStateChange(data => {
|
||||
if (data.type === 'test-timeout-configured' && !apiOptions.debug) {
|
||||
timeoutTrigger.ignoreFor(data.period);
|
||||
}
|
||||
});
|
||||
runStatus.observeWorker(worker, file, {selectingLines: lineNumbers.length > 0});
|
||||
deregisteredSharedWorkers.push(sharedWorkers.observeWorkerProcess(worker, runStatus));
|
||||
deregisteredSharedWorkers.push(observeWorkerProcess(worker, runStatus));
|
||||
|
||||
pendingWorkers.add(worker);
|
||||
worker.promise.then(() => {
|
||||
pendingWorkers.delete(worker);
|
||||
});
|
||||
restartTimer();
|
||||
timeoutTrigger.debounce();
|
||||
|
||||
await worker.promise;
|
||||
}, {concurrency, stopOnError: false});
|
||||
|
||||
// Allow shared workers to clean up before the run ends.
|
||||
await Promise.all(deregisteredSharedWorkers);
|
||||
scheduler.storeFailedTestFiles(runStatus, this.options.cacheEnabled === false ? null : this._createCacheDir());
|
||||
} catch (error) {
|
||||
if (error && error.name === 'AggregateError') {
|
||||
for (const err of error) {
|
||||
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, err)});
|
||||
for (const error_ of error.errors) {
|
||||
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error_)});
|
||||
}
|
||||
} else {
|
||||
runStatus.emitStateChange({type: 'internal-error', err: serializeError('Internal error', false, error)});
|
||||
}
|
||||
}
|
||||
|
||||
restartTimer.cancel();
|
||||
timeoutTrigger.discard();
|
||||
return runStatus;
|
||||
}
|
||||
|
||||
_getLocalCacheDir() {
|
||||
return path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
|
||||
}
|
||||
|
||||
_createCacheDir() {
|
||||
if (this._cacheDir) {
|
||||
return this._cacheDir;
|
||||
}
|
||||
|
||||
const cacheDir = this.options.cacheEnabled === false ?
|
||||
fs.mkdtempSync(`${tempDir}${path.sep}`) :
|
||||
path.join(this.options.projectDir, 'node_modules', '.cache', 'ava');
|
||||
const cacheDir = this.options.cacheEnabled === false
|
||||
? fs.mkdtempSync(`${tempDir}${path.sep}`)
|
||||
: this._getLocalCacheDir();
|
||||
|
||||
// Ensure cacheDir exists
|
||||
fs.mkdirSync(cacheDir, {recursive: true});
|
||||
|
|
@ -272,5 +333,3 @@ class Api extends Emittery {
|
|||
return cacheDir;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Api;
|
||||
|
|
|
|||
527
node_modules/ava/lib/assert.js
generated
vendored
527
node_modules/ava/lib/assert.js
generated
vendored
File diff suppressed because it is too large
Load diff
23
node_modules/ava/lib/chalk.js
generated
vendored
23
node_modules/ava/lib/chalk.js
generated
vendored
|
|
@ -1,20 +1,15 @@
|
|||
'use strict';
|
||||
const chalk = require('chalk');
|
||||
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
|
||||
|
||||
let ctx = null;
|
||||
exports.get = () => {
|
||||
if (!ctx) {
|
||||
throw new Error('Chalk has not yet been configured');
|
||||
}
|
||||
let chalk = new Chalk(); // eslint-disable-line import/no-mutable-exports
|
||||
|
||||
return ctx;
|
||||
};
|
||||
export {chalk};
|
||||
|
||||
exports.set = options => {
|
||||
if (ctx) {
|
||||
let configured = false;
|
||||
export function set(options) {
|
||||
if (configured) {
|
||||
throw new Error('Chalk has already been configured');
|
||||
}
|
||||
|
||||
ctx = new chalk.Instance(options);
|
||||
return ctx;
|
||||
};
|
||||
configured = true;
|
||||
chalk = new Chalk(options);
|
||||
}
|
||||
|
|
|
|||
230
node_modules/ava/lib/cli.js
generated
vendored
230
node_modules/ava/lib/cli.js
generated
vendored
|
|
@ -1,89 +1,112 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const del = require('del');
|
||||
const updateNotifier = require('update-notifier');
|
||||
const figures = require('figures');
|
||||
const arrify = require('arrify');
|
||||
const yargs = require('yargs');
|
||||
const readPkg = require('read-pkg');
|
||||
const isCi = require('./is-ci');
|
||||
const {loadConfig} = require('./load-config');
|
||||
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import process from 'node:process';
|
||||
|
||||
import arrify from 'arrify';
|
||||
import ciParallelVars from 'ci-parallel-vars';
|
||||
import del from 'del';
|
||||
import figures from 'figures';
|
||||
import yargs from 'yargs';
|
||||
import {hideBin} from 'yargs/helpers'; // eslint-disable-line node/file-extension-in-import
|
||||
|
||||
import Api from './api.js';
|
||||
import {chalk} from './chalk.js';
|
||||
import validateEnvironmentVariables from './environment-variables.js';
|
||||
import normalizeExtensions from './extensions.js';
|
||||
import {normalizeGlobs, normalizePattern} from './globs.js';
|
||||
import {controlFlow} from './ipc-flow-control.cjs';
|
||||
import isCi from './is-ci.js';
|
||||
import {splitPatternAndLineNumbers} from './line-numbers.js';
|
||||
import {loadConfig} from './load-config.js';
|
||||
import normalizeModuleTypes from './module-types.js';
|
||||
import normalizeNodeArguments from './node-arguments.js';
|
||||
import providerManager from './provider-manager.js';
|
||||
import DefaultReporter from './reporters/default.js';
|
||||
import TapReporter from './reporters/tap.js';
|
||||
import Watcher from './watcher.js';
|
||||
|
||||
function exit(message) {
|
||||
console.error(`\n ${require('./chalk').get().red(figures.cross)} ${message}`);
|
||||
console.error(`\n ${chalk.red(figures.cross)} ${message}`);
|
||||
process.exit(1); // eslint-disable-line unicorn/no-process-exit
|
||||
}
|
||||
|
||||
const coerceLastValue = value => {
|
||||
return Array.isArray(value) ? value.pop() : value;
|
||||
};
|
||||
const coerceLastValue = value => Array.isArray(value) ? value.pop() : value;
|
||||
|
||||
const FLAGS = {
|
||||
concurrency: {
|
||||
alias: 'c',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Max number of test files running at the same time (default: CPU cores)',
|
||||
type: 'number'
|
||||
type: 'number',
|
||||
},
|
||||
'fail-fast': {
|
||||
coerce: coerceLastValue,
|
||||
description: 'Stop after first test failure',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
match: {
|
||||
alias: 'm',
|
||||
description: 'Only run tests with matching title (can be repeated)',
|
||||
type: 'string'
|
||||
type: 'string',
|
||||
},
|
||||
'no-worker-threads': {
|
||||
coerce: coerceLastValue,
|
||||
description: 'Don\'t use worker threads',
|
||||
type: 'boolean',
|
||||
},
|
||||
'node-arguments': {
|
||||
coerce: coerceLastValue,
|
||||
description: 'Additional Node.js arguments for launching worker processes (specify as a single string)',
|
||||
type: 'string'
|
||||
type: 'string',
|
||||
},
|
||||
serial: {
|
||||
alias: 's',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Run tests serially',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
tap: {
|
||||
alias: 't',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Generate TAP output',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
timeout: {
|
||||
alias: 'T',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Set global timeout (milliseconds or human-readable, e.g. 10s, 2m)',
|
||||
type: 'string'
|
||||
type: 'string',
|
||||
},
|
||||
'update-snapshots': {
|
||||
alias: 'u',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Update snapshots',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
verbose: {
|
||||
alias: 'v',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Enable verbose output',
|
||||
type: 'boolean'
|
||||
description: 'Enable verbose output (default)',
|
||||
type: 'boolean',
|
||||
},
|
||||
watch: {
|
||||
alias: 'w',
|
||||
coerce: coerceLastValue,
|
||||
description: 'Re-run tests when files change',
|
||||
type: 'boolean'
|
||||
}
|
||||
type: 'boolean',
|
||||
},
|
||||
};
|
||||
|
||||
exports.run = async () => { // eslint-disable-line complexity
|
||||
let conf = {};
|
||||
let confError = null;
|
||||
export default async function loadCli() { // eslint-disable-line complexity
|
||||
let conf;
|
||||
let confError;
|
||||
try {
|
||||
const {argv: {config: configFile}} = yargs.help(false);
|
||||
const {argv: {config: configFile}} = yargs(hideBin(process.argv)).help(false);
|
||||
conf = await loadConfig({configFile});
|
||||
if (conf.configFile && path.basename(conf.configFile) !== path.relative(conf.projectDir, conf.configFile)) {
|
||||
console.log(chalk.magenta(` ${figures.warning} Using configuration from ${conf.configFile}`));
|
||||
}
|
||||
} catch (error) {
|
||||
confError = error;
|
||||
}
|
||||
|
|
@ -91,18 +114,24 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
// Enter debug mode if the main process is being inspected. This assumes the
|
||||
// worker processes are automatically inspected, too. It is not necessary to
|
||||
// run AVA with the debug command, though it's allowed.
|
||||
const activeInspector = require('inspector').url() !== undefined; // eslint-disable-line node/no-unsupported-features/node-builtins
|
||||
let debug = activeInspector ?
|
||||
{
|
||||
let activeInspector = false;
|
||||
try {
|
||||
const {default: inspector} = await import('node:inspector'); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
|
||||
activeInspector = inspector.url() !== undefined;
|
||||
} catch {}
|
||||
|
||||
let debug = activeInspector
|
||||
? {
|
||||
active: true,
|
||||
break: false,
|
||||
files: [],
|
||||
host: undefined,
|
||||
port: undefined
|
||||
port: undefined,
|
||||
} : null;
|
||||
|
||||
let resetCache = false;
|
||||
const {argv} = yargs
|
||||
const {argv} = yargs(hideBin(process.argv))
|
||||
.parserConfiguration({
|
||||
'boolean-negation': true,
|
||||
'camel-case-expansion': false,
|
||||
|
|
@ -116,7 +145,7 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
'set-placeholder-key': false,
|
||||
'short-option-groups': true,
|
||||
'strip-aliased': true,
|
||||
'unknown-options-as-args': false
|
||||
'unknown-options-as-args': false,
|
||||
})
|
||||
.usage('$0 [<pattern>...]')
|
||||
.usage('$0 debug [<pattern>...]')
|
||||
|
|
@ -124,16 +153,16 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
.options({
|
||||
color: {
|
||||
description: 'Force color output',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
config: {
|
||||
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files'
|
||||
}
|
||||
description: 'Specific JavaScript file for AVA to read its config from, instead of using package.json or ava.config.* files',
|
||||
},
|
||||
})
|
||||
.command('* [<pattern>...]', 'Run tests', yargs => yargs.options(FLAGS).positional('pattern', {
|
||||
array: true,
|
||||
describe: 'Glob patterns to select what test files to run. Leave empty if you want AVA to run all test files instead. Add a colon and specify line numbers of specific tests to run',
|
||||
type: 'string'
|
||||
describe: 'Select which test files to run. Leave empty if you want AVA to run all test files as per your configuration. Accepts glob patterns, directories that (recursively) contain test files, and file paths. Add a colon and specify line numbers of specific tests to run',
|
||||
type: 'string',
|
||||
}), argv => {
|
||||
if (activeInspector) {
|
||||
debug.files = argv.pattern || [];
|
||||
|
|
@ -145,22 +174,22 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
yargs => yargs.options(FLAGS).options({
|
||||
break: {
|
||||
description: 'Break before the test file is loaded',
|
||||
type: 'boolean'
|
||||
type: 'boolean',
|
||||
},
|
||||
host: {
|
||||
default: '127.0.0.1',
|
||||
description: 'Address or hostname through which you can connect to the inspector',
|
||||
type: 'string'
|
||||
type: 'string',
|
||||
},
|
||||
port: {
|
||||
default: 9229,
|
||||
description: 'Port on which you can connect to the inspector',
|
||||
type: 'number'
|
||||
}
|
||||
type: 'number',
|
||||
},
|
||||
}).positional('pattern', {
|
||||
demand: true,
|
||||
describe: 'Glob patterns to select a single test file to debug. Add a colon and specify line numbers of specific tests to run',
|
||||
type: 'string'
|
||||
type: 'string',
|
||||
}),
|
||||
argv => {
|
||||
debug = {
|
||||
|
|
@ -168,12 +197,12 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
break: argv.break === true,
|
||||
files: argv.pattern,
|
||||
host: argv.host,
|
||||
port: argv.port
|
||||
port: argv.port,
|
||||
};
|
||||
})
|
||||
.command(
|
||||
'reset-cache',
|
||||
'Reset AVA’s compilation cache and exit',
|
||||
'Delete any temporary files and state kept by AVA, then exit',
|
||||
yargs => yargs,
|
||||
() => {
|
||||
resetCache = true;
|
||||
|
|
@ -184,8 +213,14 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
.help();
|
||||
|
||||
const combined = {...conf};
|
||||
|
||||
for (const flag of Object.keys(FLAGS)) {
|
||||
if (Reflect.has(argv, flag)) {
|
||||
if (flag === 'no-worker-threads' && Reflect.has(argv, 'worker-threads')) {
|
||||
combined.workerThreads = argv['worker-threads'];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (argv[flag] !== undefined) {
|
||||
if (flag === 'fail-fast') {
|
||||
combined.failFast = argv[flag];
|
||||
} else if (flag === 'update-snapshots') {
|
||||
|
|
@ -196,13 +231,15 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
}
|
||||
}
|
||||
|
||||
const chalkOptions = {level: combined.color === false ? 0 : require('chalk').level};
|
||||
const chalk = require('./chalk').set(chalkOptions);
|
||||
|
||||
if (combined.updateSnapshots && combined.match) {
|
||||
exit('Snapshots cannot be updated when matching specific tests.');
|
||||
const chalkOptions = {level: 0};
|
||||
if (combined.color !== false) {
|
||||
const {supportsColor: {level}} = await import('chalk'); // eslint-disable-line node/no-unsupported-features/es-syntax, unicorn/import-style
|
||||
chalkOptions.level = level;
|
||||
}
|
||||
|
||||
const {set: setChalk} = await import('./chalk.js'); // eslint-disable-line node/no-unsupported-features/es-syntax
|
||||
setChalk(chalkOptions);
|
||||
|
||||
if (confError) {
|
||||
if (confError.parent) {
|
||||
exit(`${confError.message}\n\n${chalk.gray((confError.parent && confError.parent.stack) || confError.parent)}`);
|
||||
|
|
@ -211,23 +248,23 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
}
|
||||
}
|
||||
|
||||
updateNotifier({pkg: require('../package.json')}).notify();
|
||||
|
||||
const {nonSemVerExperiments: experiments, projectDir} = conf;
|
||||
if (resetCache) {
|
||||
const cacheDir = path.join(projectDir, 'node_modules', '.cache', 'ava');
|
||||
|
||||
try {
|
||||
await del('*', {
|
||||
cwd: cacheDir,
|
||||
nodir: true
|
||||
});
|
||||
console.error(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
|
||||
const deletedFilePaths = await del('*', {cwd: cacheDir});
|
||||
|
||||
if (deletedFilePaths.length === 0) {
|
||||
console.log(`\n${chalk.green(figures.tick)} No cache files to remove`);
|
||||
} else {
|
||||
console.log(`\n${chalk.green(figures.tick)} Removed AVA cache files in ${cacheDir}`);
|
||||
}
|
||||
|
||||
process.exit(0); // eslint-disable-line unicorn/no-process-exit
|
||||
} catch (error) {
|
||||
exit(`Error removing AVA cache files in ${cacheDir}\n\n${chalk.gray((error && error.stack) || error)}`);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (argv.watch) {
|
||||
|
|
@ -266,6 +303,10 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
console.log(chalk.magenta(` ${figures.warning} Experiments are enabled. These are unsupported and may change or be removed at any time.`));
|
||||
}
|
||||
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
exit('Built-in Babel support has been removed.');
|
||||
}
|
||||
|
||||
if (Reflect.has(conf, 'compileEnhancements')) {
|
||||
exit('Enhancement compilation must be configured in AVA’s Babel options.');
|
||||
}
|
||||
|
|
@ -278,22 +319,9 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
exit('’sources’ has been removed. Use ’ignoredByWatcher’ to provide glob patterns of files that the watcher should ignore.');
|
||||
}
|
||||
|
||||
const ciParallelVars = require('ci-parallel-vars');
|
||||
const Api = require('./api');
|
||||
const DefaultReporter = require('./reporters/default');
|
||||
const TapReporter = require('./reporters/tap');
|
||||
const Watcher = require('./watcher');
|
||||
const normalizeExtensions = require('./extensions');
|
||||
const normalizeModuleTypes = require('./module-types');
|
||||
const {normalizeGlobs, normalizePattern} = require('./globs');
|
||||
const normalizeNodeArguments = require('./node-arguments');
|
||||
const validateEnvironmentVariables = require('./environment-variables');
|
||||
const {splitPatternAndLineNumbers} = require('./line-numbers');
|
||||
const providerManager = require('./provider-manager');
|
||||
|
||||
let pkg;
|
||||
try {
|
||||
pkg = readPkg.sync({cwd: projectDir});
|
||||
pkg = JSON.parse(fs.readFileSync(path.resolve(projectDir, 'package.json')));
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
|
|
@ -303,26 +331,13 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
const {type: defaultModuleType = 'commonjs'} = pkg || {};
|
||||
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'babel')) {
|
||||
try {
|
||||
const {level, main} = providerManager.babel(projectDir);
|
||||
providers.push({
|
||||
level,
|
||||
main: main({config: conf.babel}),
|
||||
type: 'babel'
|
||||
});
|
||||
} catch (error) {
|
||||
exit(error.message);
|
||||
}
|
||||
}
|
||||
|
||||
if (Reflect.has(conf, 'typescript')) {
|
||||
try {
|
||||
const {level, main} = providerManager.typescript(projectDir);
|
||||
const {level, main} = await providerManager.typescript(projectDir);
|
||||
providers.push({
|
||||
level,
|
||||
main: main({config: conf.typescript}),
|
||||
type: 'typescript'
|
||||
type: 'typescript',
|
||||
});
|
||||
} catch (error) {
|
||||
exit(error.message);
|
||||
|
|
@ -377,16 +392,14 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
.map(pattern => splitPatternAndLineNumbers(pattern))
|
||||
.map(({pattern, ...rest}) => ({
|
||||
pattern: normalizePattern(path.relative(projectDir, path.resolve(process.cwd(), pattern))),
|
||||
...rest
|
||||
...rest,
|
||||
}));
|
||||
if (combined.updateSnapshots && filter.some(condition => condition.lineNumbers !== null)) {
|
||||
exit('Snapshots cannot be updated when selecting specific tests by their line number.');
|
||||
}
|
||||
|
||||
const api = new Api({
|
||||
cacheEnabled: combined.cache !== false,
|
||||
chalkOptions,
|
||||
concurrency: combined.concurrency || 0,
|
||||
workerThreads: combined.workerThreads !== false,
|
||||
debug,
|
||||
environmentVariables,
|
||||
experiments,
|
||||
|
|
@ -406,38 +419,31 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
snapshotDir: combined.snapshotDir ? path.resolve(projectDir, combined.snapshotDir) : null,
|
||||
timeout: combined.timeout || '10s',
|
||||
updateSnapshots: combined.updateSnapshots,
|
||||
workerArgv: argv['--']
|
||||
workerArgv: argv['--'],
|
||||
});
|
||||
|
||||
const reporter = combined.tap && !combined.watch && debug === null ? new TapReporter({
|
||||
extensions: globs.extensions,
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr
|
||||
stdStream: process.stderr,
|
||||
}) : new DefaultReporter({
|
||||
extensions: globs.extensions,
|
||||
projectDir,
|
||||
reportStream: process.stdout,
|
||||
stdStream: process.stderr,
|
||||
watching: combined.watch,
|
||||
verbose: debug !== null || combined.verbose || isCi || !process.stdout.isTTY
|
||||
});
|
||||
|
||||
api.on('run', plan => {
|
||||
reporter.startRun(plan);
|
||||
|
||||
if (process.env.AVA_EMIT_RUN_STATUS_OVER_IPC === 'I\'ll find a payphone baby / Take some time to talk to you') {
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
const bufferedSend = controlFlow(process);
|
||||
|
||||
if (process.versions.node >= '12.16.0') {
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend(evt);
|
||||
});
|
||||
} else {
|
||||
const v8 = require('v8');
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend([...v8.serialize(evt)]);
|
||||
});
|
||||
}
|
||||
plan.status.on('stateChange', evt => {
|
||||
bufferedSend(evt);
|
||||
});
|
||||
}
|
||||
|
||||
plan.status.on('stateChange', evt => {
|
||||
|
|
@ -455,7 +461,7 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
globs,
|
||||
projectDir,
|
||||
providers,
|
||||
reporter
|
||||
reporter,
|
||||
});
|
||||
watcher.observeStdin(process.stdin);
|
||||
} else {
|
||||
|
|
@ -476,4 +482,4 @@ exports.run = async () => { // eslint-disable-line complexity
|
|||
process.exitCode = runStatus.suggestExitCode({matching: match.length > 0});
|
||||
reporter.endRun();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
|||
29
node_modules/ava/lib/code-excerpt.js
generated
vendored
29
node_modules/ava/lib/code-excerpt.js
generated
vendored
|
|
@ -1,14 +1,14 @@
|
|||
'use strict';
|
||||
const fs = require('fs');
|
||||
const equalLength = require('equal-length');
|
||||
const codeExcerpt = require('code-excerpt');
|
||||
const truncate = require('cli-truncate');
|
||||
const chalk = require('./chalk').get();
|
||||
import fs from 'node:fs';
|
||||
|
||||
import truncate from 'cli-truncate';
|
||||
import codeExcerpt from 'code-excerpt';
|
||||
|
||||
import {chalk} from './chalk.js';
|
||||
|
||||
const formatLineNumber = (lineNumber, maxLineNumber) =>
|
||||
' '.repeat(Math.max(0, String(maxLineNumber).length - String(lineNumber).length)) + lineNumber;
|
||||
|
||||
module.exports = (source, options = {}) => {
|
||||
export default function exceptCode(source, options = {}) {
|
||||
if (!source.isWithinProject || source.isDependency) {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -18,7 +18,7 @@ module.exports = (source, options = {}) => {
|
|||
|
||||
let contents;
|
||||
try {
|
||||
contents = fs.readFileSync(file, 'utf8');
|
||||
contents = fs.readFileSync(new URL(file), 'utf8');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -30,25 +30,20 @@ module.exports = (source, options = {}) => {
|
|||
|
||||
const lines = excerpt.map(item => ({
|
||||
line: item.line,
|
||||
value: truncate(item.value, maxWidth - String(line).length - 5)
|
||||
value: truncate(item.value, maxWidth - String(line).length - 5),
|
||||
}));
|
||||
|
||||
const joinedLines = lines.map(line => line.value).join('\n');
|
||||
const extendedLines = equalLength(joinedLines).split('\n');
|
||||
const extendedWidth = Math.max(...lines.map(item => item.value.length));
|
||||
|
||||
return lines
|
||||
.map((item, index) => ({
|
||||
line: item.line,
|
||||
value: extendedLines[index]
|
||||
}))
|
||||
.map(item => {
|
||||
const isErrorSource = item.line === line;
|
||||
|
||||
const lineNumber = formatLineNumber(item.line, line) + ':';
|
||||
const coloredLineNumber = isErrorSource ? lineNumber : chalk.grey(lineNumber);
|
||||
const result = ` ${coloredLineNumber} ${item.value}`;
|
||||
const result = ` ${coloredLineNumber} ${item.value.padEnd(extendedWidth)}`;
|
||||
|
||||
return isErrorSource ? chalk.bgRed(result) : result;
|
||||
})
|
||||
.join('\n');
|
||||
};
|
||||
}
|
||||
|
|
|
|||
94
node_modules/ava/lib/concordance-options.js
generated
vendored
94
node_modules/ava/lib/concordance-options.js
generated
vendored
|
|
@ -1,37 +1,32 @@
|
|||
'use strict';
|
||||
const util = require('util'); // eslint-disable-line unicorn/import-style
|
||||
const ansiStyles = require('ansi-styles');
|
||||
const stripAnsi = require('strip-ansi');
|
||||
const cloneDeepWith = require('lodash/cloneDeepWith');
|
||||
const reactPlugin = require('@concordance/react');
|
||||
const chalk = require('./chalk').get();
|
||||
import {inspect} from 'node:util';
|
||||
|
||||
// Wrap Concordance's React plugin. Change the name to avoid collisions if in
|
||||
// the future users can register plugins themselves.
|
||||
const avaReactPlugin = {...reactPlugin, name: 'ava-plugin-react'};
|
||||
const plugins = [avaReactPlugin];
|
||||
import ansiStyles from 'ansi-styles';
|
||||
import {Chalk} from 'chalk'; // eslint-disable-line unicorn/import-style
|
||||
import stripAnsi from 'strip-ansi';
|
||||
|
||||
const forceColor = new chalk.Instance({level: Math.max(chalk.level, 1)});
|
||||
import {chalk} from './chalk.js';
|
||||
|
||||
const forceColor = new Chalk({level: Math.max(chalk.level, 1)});
|
||||
|
||||
const colorTheme = {
|
||||
boolean: ansiStyles.yellow,
|
||||
circular: forceColor.grey('[Circular]'),
|
||||
date: {
|
||||
invalid: forceColor.red('invalid'),
|
||||
value: ansiStyles.blue
|
||||
value: ansiStyles.blue,
|
||||
},
|
||||
diffGutters: {
|
||||
actual: forceColor.red('-') + ' ',
|
||||
expected: forceColor.green('+') + ' ',
|
||||
padding: ' '
|
||||
padding: ' ',
|
||||
},
|
||||
error: {
|
||||
ctor: {open: ansiStyles.grey.open + '(', close: ')' + ansiStyles.grey.close},
|
||||
name: ansiStyles.magenta
|
||||
name: ansiStyles.magenta,
|
||||
},
|
||||
function: {
|
||||
name: ansiStyles.blue,
|
||||
stringTag: ansiStyles.magenta
|
||||
stringTag: ansiStyles.magenta,
|
||||
},
|
||||
global: ansiStyles.magenta,
|
||||
item: {after: forceColor.grey(',')},
|
||||
|
|
@ -45,44 +40,16 @@ const colorTheme = {
|
|||
closeBracket: forceColor.grey('}'),
|
||||
ctor: ansiStyles.magenta,
|
||||
stringTag: {open: ansiStyles.magenta.open + '@', close: ansiStyles.magenta.close},
|
||||
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close}
|
||||
secondaryStringTag: {open: ansiStyles.grey.open + '@', close: ansiStyles.grey.close},
|
||||
},
|
||||
property: {
|
||||
after: forceColor.grey(','),
|
||||
keyBracket: {open: forceColor.grey('['), close: forceColor.grey(']')},
|
||||
valueFallback: forceColor.grey('…')
|
||||
},
|
||||
react: {
|
||||
functionType: forceColor.grey('\u235F'),
|
||||
openTag: {
|
||||
start: forceColor.grey('<'),
|
||||
end: forceColor.grey('>'),
|
||||
selfClose: forceColor.grey('/'),
|
||||
selfCloseVoid: ' ' + forceColor.grey('/')
|
||||
},
|
||||
closeTag: {
|
||||
open: forceColor.grey('</'),
|
||||
close: forceColor.grey('>')
|
||||
},
|
||||
tagName: ansiStyles.magenta,
|
||||
attribute: {
|
||||
separator: '=',
|
||||
value: {
|
||||
openBracket: forceColor.grey('{'),
|
||||
closeBracket: forceColor.grey('}'),
|
||||
string: {
|
||||
line: {open: forceColor.blue('"'), close: forceColor.blue('"'), escapeQuote: '"'}
|
||||
}
|
||||
}
|
||||
},
|
||||
child: {
|
||||
openBracket: forceColor.grey('{'),
|
||||
closeBracket: forceColor.grey('}')
|
||||
}
|
||||
valueFallback: forceColor.grey('…'),
|
||||
},
|
||||
regexp: {
|
||||
source: {open: ansiStyles.blue.open + '/', close: '/' + ansiStyles.blue.close},
|
||||
flags: ansiStyles.yellow
|
||||
flags: ansiStyles.yellow,
|
||||
},
|
||||
stats: {separator: forceColor.grey('---')},
|
||||
string: {
|
||||
|
|
@ -94,45 +61,42 @@ const colorTheme = {
|
|||
diff: {
|
||||
insert: {
|
||||
open: ansiStyles.bgGreen.open + ansiStyles.black.open,
|
||||
close: ansiStyles.black.close + ansiStyles.bgGreen.close
|
||||
close: ansiStyles.black.close + ansiStyles.bgGreen.close,
|
||||
},
|
||||
delete: {
|
||||
open: ansiStyles.bgRed.open + ansiStyles.black.open,
|
||||
close: ansiStyles.black.close + ansiStyles.bgRed.close
|
||||
close: ansiStyles.black.close + ansiStyles.bgRed.close,
|
||||
},
|
||||
equal: ansiStyles.blue,
|
||||
insertLine: {
|
||||
open: ansiStyles.green.open,
|
||||
close: ansiStyles.green.close
|
||||
close: ansiStyles.green.close,
|
||||
},
|
||||
deleteLine: {
|
||||
open: ansiStyles.red.open,
|
||||
close: ansiStyles.red.close
|
||||
}
|
||||
}
|
||||
close: ansiStyles.red.close,
|
||||
},
|
||||
},
|
||||
},
|
||||
symbol: ansiStyles.yellow,
|
||||
typedArray: {
|
||||
bytes: ansiStyles.yellow
|
||||
bytes: ansiStyles.yellow,
|
||||
},
|
||||
undefined: ansiStyles.yellow
|
||||
undefined: ansiStyles.yellow,
|
||||
};
|
||||
|
||||
const plainTheme = cloneDeepWith(colorTheme, value => {
|
||||
if (typeof value === 'string') {
|
||||
return stripAnsi(value);
|
||||
}
|
||||
});
|
||||
const plainTheme = JSON.parse(JSON.stringify(colorTheme), value => typeof value === 'string' ? stripAnsi(value) : value);
|
||||
|
||||
const theme = chalk.level > 0 ? colorTheme : plainTheme;
|
||||
|
||||
exports.default = {
|
||||
const concordanceOptions = {
|
||||
// Use Node's object inspection depth, clamped to a minimum of 3
|
||||
get maxDepth() {
|
||||
return Math.max(3, util.inspect.defaultOptions.depth);
|
||||
return Math.max(3, inspect.defaultOptions.depth);
|
||||
},
|
||||
plugins,
|
||||
theme
|
||||
theme,
|
||||
};
|
||||
|
||||
exports.snapshotManager = {plugins, theme: plainTheme};
|
||||
export default concordanceOptions;
|
||||
|
||||
export const snapshotManager = {theme: plainTheme};
|
||||
|
|
|
|||
9
node_modules/ava/lib/context-ref.js
generated
vendored
9
node_modules/ava/lib/context-ref.js
generated
vendored
|
|
@ -1,7 +1,4 @@
|
|||
'use strict';
|
||||
const clone = require('lodash/clone');
|
||||
|
||||
class ContextRef {
|
||||
export default class ContextRef {
|
||||
constructor() {
|
||||
this.value = {};
|
||||
}
|
||||
|
|
@ -18,7 +15,6 @@ class ContextRef {
|
|||
return new LateBinding(this);
|
||||
}
|
||||
}
|
||||
module.exports = ContextRef;
|
||||
|
||||
class LateBinding extends ContextRef {
|
||||
constructor(ref) {
|
||||
|
|
@ -29,7 +25,8 @@ class LateBinding extends ContextRef {
|
|||
|
||||
get() {
|
||||
if (!this.bound) {
|
||||
this.set(clone(this.ref.get()));
|
||||
const value = this.ref.get();
|
||||
this.set(value !== null && typeof value === 'object' ? {...value} : value);
|
||||
}
|
||||
|
||||
return super.get();
|
||||
|
|
|
|||
52
node_modules/ava/lib/create-chain.js
generated
vendored
52
node_modules/ava/lib/create-chain.js
generated
vendored
|
|
@ -1,4 +1,3 @@
|
|||
'use strict';
|
||||
const chainRegistry = new WeakMap();
|
||||
|
||||
function startChain(name, call, defaults) {
|
||||
|
|
@ -48,20 +47,16 @@ function createHookChain(hook, isAfterHook) {
|
|||
// * `skip` must come at the end
|
||||
// * no `only`
|
||||
// * no repeating
|
||||
extendChain(hook, 'cb', 'callback');
|
||||
extendChain(hook, 'skip', 'skipped');
|
||||
extendChain(hook.cb, 'skip', 'skipped');
|
||||
if (isAfterHook) {
|
||||
extendChain(hook, 'always');
|
||||
extendChain(hook.always, 'cb', 'callback');
|
||||
extendChain(hook.always, 'skip', 'skipped');
|
||||
extendChain(hook.always.cb, 'skip', 'skipped');
|
||||
}
|
||||
|
||||
return hook;
|
||||
}
|
||||
|
||||
function createChain(fn, defaults, meta) {
|
||||
export default function createChain(fn, defaults, meta) {
|
||||
// Test chaining rules:
|
||||
// * `serial` must come at the start
|
||||
// * `only` and `skip` must come at the end
|
||||
|
|
@ -69,27 +64,15 @@ function createChain(fn, defaults, meta) {
|
|||
// * `only` and `skip` cannot be chained together
|
||||
// * no repeating
|
||||
const root = startChain('test', fn, {...defaults, type: 'test'});
|
||||
extendChain(root, 'cb', 'callback');
|
||||
extendChain(root, 'failing');
|
||||
extendChain(root, 'only', 'exclusive');
|
||||
extendChain(root, 'serial');
|
||||
extendChain(root, 'skip', 'skipped');
|
||||
extendChain(root.cb, 'failing');
|
||||
extendChain(root.cb, 'only', 'exclusive');
|
||||
extendChain(root.cb, 'skip', 'skipped');
|
||||
extendChain(root.cb.failing, 'only', 'exclusive');
|
||||
extendChain(root.cb.failing, 'skip', 'skipped');
|
||||
extendChain(root.failing, 'only', 'exclusive');
|
||||
extendChain(root.failing, 'skip', 'skipped');
|
||||
extendChain(root.serial, 'cb', 'callback');
|
||||
extendChain(root.serial, 'failing');
|
||||
extendChain(root.serial, 'only', 'exclusive');
|
||||
extendChain(root.serial, 'skip', 'skipped');
|
||||
extendChain(root.serial.cb, 'failing');
|
||||
extendChain(root.serial.cb, 'only', 'exclusive');
|
||||
extendChain(root.serial.cb, 'skip', 'skipped');
|
||||
extendChain(root.serial.cb.failing, 'only', 'exclusive');
|
||||
extendChain(root.serial.cb.failing, 'skip', 'skipped');
|
||||
extendChain(root.serial.failing, 'only', 'exclusive');
|
||||
extendChain(root.serial.failing, 'skip', 'skipped');
|
||||
|
||||
|
|
@ -108,9 +91,38 @@ function createChain(fn, defaults, meta) {
|
|||
root.todo = startChain('test.todo', fn, {...defaults, type: 'test', todo: true});
|
||||
root.serial.todo = startChain('test.serial.todo', fn, {...defaults, serial: true, type: 'test', todo: true});
|
||||
|
||||
root.macro = options => {
|
||||
if (typeof options === 'function') {
|
||||
return Object.freeze({exec: options});
|
||||
}
|
||||
|
||||
return Object.freeze({exec: options.exec, title: options.title});
|
||||
};
|
||||
|
||||
root.meta = meta;
|
||||
|
||||
// Our type definition uses ESM syntax; when using CJS with VSCode, the
|
||||
// auto-completion assumes the root is accessed through `require('ava').default`.
|
||||
// Placate VSCode by adding a mostly hidden default property on the root.
|
||||
// This is available through both CJS and ESM imports. We use a proxy so that
|
||||
// we don't end up with root.default.default.default chains.
|
||||
Object.defineProperty(root, 'default', {
|
||||
configurable: false,
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
value: new Proxy(root, {
|
||||
apply(target, thisArg, argumentsList) {
|
||||
target.apply(thisArg, argumentsList);
|
||||
},
|
||||
get(target, prop) {
|
||||
if (prop === 'default') {
|
||||
throw new TypeError('Cannot access default.default');
|
||||
}
|
||||
|
||||
return target[prop];
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
return root;
|
||||
}
|
||||
|
||||
module.exports = createChain;
|
||||
|
|
|
|||
5
node_modules/ava/lib/environment-variables.js
generated
vendored
5
node_modules/ava/lib/environment-variables.js
generated
vendored
|
|
@ -1,5 +1,4 @@
|
|||
'use strict';
|
||||
function validateEnvironmentVariables(environmentVariables) {
|
||||
export default function validateEnvironmentVariables(environmentVariables) {
|
||||
if (!environmentVariables) {
|
||||
return {};
|
||||
}
|
||||
|
|
@ -12,5 +11,3 @@ function validateEnvironmentVariables(environmentVariables) {
|
|||
|
||||
return environmentVariables;
|
||||
}
|
||||
|
||||
module.exports = validateEnvironmentVariables;
|
||||
|
|
|
|||
73
node_modules/ava/lib/eslint-plugin-helper-worker.js
generated
vendored
Normal file
73
node_modules/ava/lib/eslint-plugin-helper-worker.js
generated
vendored
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
import v8 from 'node:v8';
|
||||
import {parentPort, workerData} from 'node:worker_threads';
|
||||
|
||||
import normalizeExtensions from './extensions.js';
|
||||
import {normalizeGlobs} from './globs.js';
|
||||
import {loadConfig} from './load-config.js';
|
||||
import providerManager from './provider-manager.js';
|
||||
|
||||
const MAX_DATA_LENGTH_EXCLUSIVE = 100 * 1024; // Allocate 100 KiB to exchange globs.
|
||||
|
||||
const configCache = new Map();
|
||||
|
||||
const collectProviders = async ({conf, projectDir}) => {
|
||||
const providers = [];
|
||||
if (Reflect.has(conf, 'typescript')) {
|
||||
const {level, main} = await providerManager.typescript(projectDir);
|
||||
providers.push({
|
||||
level,
|
||||
main: main({config: conf.typescript}),
|
||||
type: 'typescript',
|
||||
});
|
||||
}
|
||||
|
||||
return providers;
|
||||
};
|
||||
|
||||
const buildGlobs = ({conf, providers, projectDir, overrideExtensions, overrideFiles}) => {
|
||||
const extensions = overrideExtensions
|
||||
? normalizeExtensions(overrideExtensions)
|
||||
: normalizeExtensions(conf.extensions, providers);
|
||||
|
||||
return {
|
||||
cwd: projectDir,
|
||||
...normalizeGlobs({
|
||||
extensions,
|
||||
files: overrideFiles ? overrideFiles : conf.files,
|
||||
providers,
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
||||
const resolveGlobs = async (projectDir, overrideExtensions, overrideFiles) => {
|
||||
if (!configCache.has(projectDir)) {
|
||||
configCache.set(projectDir, loadConfig({resolveFrom: projectDir}).then(async conf => {
|
||||
const providers = await collectProviders({conf, projectDir});
|
||||
return {conf, providers};
|
||||
}));
|
||||
}
|
||||
|
||||
const {conf, providers} = await configCache.get(projectDir);
|
||||
return buildGlobs({conf, providers, projectDir, overrideExtensions, overrideFiles});
|
||||
};
|
||||
|
||||
const data = new Uint8Array(workerData.dataBuffer);
|
||||
const sync = new Int32Array(workerData.syncBuffer);
|
||||
|
||||
const handleMessage = async ({projectDir, overrideExtensions, overrideFiles}) => {
|
||||
let encoded;
|
||||
try {
|
||||
const globs = await resolveGlobs(projectDir, overrideExtensions, overrideFiles);
|
||||
encoded = v8.serialize(globs);
|
||||
} catch (error) {
|
||||
encoded = v8.serialize(error);
|
||||
}
|
||||
|
||||
const byteLength = encoded.length < MAX_DATA_LENGTH_EXCLUSIVE ? encoded.copy(data) : MAX_DATA_LENGTH_EXCLUSIVE;
|
||||
Atomics.store(sync, 0, byteLength);
|
||||
Atomics.notify(sync, 0);
|
||||
};
|
||||
|
||||
parentPort.on('message', handleMessage);
|
||||
handleMessage(workerData.firstMessage);
|
||||
delete workerData.firstMessage;
|
||||
4
node_modules/ava/lib/extensions.js
generated
vendored
4
node_modules/ava/lib/extensions.js
generated
vendored
|
|
@ -1,4 +1,4 @@
|
|||
module.exports = (configuredExtensions, providers = []) => {
|
||||
export default function resolveExtensions(configuredExtensions, providers = []) {
|
||||
// Combine all extensions possible for testing. Remove duplicate extensions.
|
||||
const duplicates = new Set();
|
||||
const seen = new Set();
|
||||
|
|
@ -43,4 +43,4 @@ module.exports = (configuredExtensions, providers = []) => {
|
|||
}
|
||||
|
||||
return [...seen];
|
||||
};
|
||||
}
|
||||
|
|
|
|||
163
node_modules/ava/lib/fork.js
generated
vendored
163
node_modules/ava/lib/fork.js
generated
vendored
|
|
@ -1,68 +1,71 @@
|
|||
'use strict';
|
||||
const childProcess = require('child_process');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const Emittery = require('emittery');
|
||||
const {controlFlow} = require('./ipc-flow-control');
|
||||
import childProcess from 'node:child_process';
|
||||
import process from 'node:process';
|
||||
import {fileURLToPath} from 'node:url';
|
||||
import {Worker} from 'node:worker_threads';
|
||||
|
||||
if (fs.realpathSync(__filename) !== __filename) {
|
||||
console.warn('WARNING: `npm link ava` and the `--preserve-symlink` flag are incompatible. We have detected that AVA is linked via `npm link`, and that you are using either an early version of Node 6, or the `--preserve-symlink` flag. This breaks AVA. You should upgrade to Node 6.2.0+, avoid the `--preserve-symlink` flag, or avoid using `npm link ava`.');
|
||||
import Emittery from 'emittery';
|
||||
import {pEvent} from 'p-event';
|
||||
|
||||
import {controlFlow} from './ipc-flow-control.cjs';
|
||||
import serializeError from './serialize-error.js';
|
||||
|
||||
let workerPath = new URL('worker/base.js', import.meta.url);
|
||||
export function _testOnlyReplaceWorkerPath(replacement) {
|
||||
workerPath = replacement;
|
||||
}
|
||||
|
||||
// In case the test file imports a different AVA install,
|
||||
// the presence of this variable allows it to require this one instead
|
||||
const AVA_PATH = path.resolve(__dirname, '..');
|
||||
const WORKER_PATH = require.resolve('./worker/subprocess');
|
||||
const additionalExecArgv = ['--enable-source-maps'];
|
||||
|
||||
class SharedWorkerChannel extends Emittery {
|
||||
constructor({channelId, filename, initialData}, sendToFork) {
|
||||
super();
|
||||
|
||||
this.id = channelId;
|
||||
this.filename = filename;
|
||||
this.initialData = initialData;
|
||||
this.sendToFork = sendToFork;
|
||||
}
|
||||
|
||||
signalReady() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-ready',
|
||||
channelId: this.id
|
||||
const createWorker = (options, execArgv) => {
|
||||
let worker;
|
||||
let postMessage;
|
||||
let close;
|
||||
if (options.workerThreads) {
|
||||
worker = new Worker(workerPath, {
|
||||
argv: options.workerArgv,
|
||||
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
|
||||
execArgv: [...execArgv, ...additionalExecArgv],
|
||||
workerData: {
|
||||
options,
|
||||
},
|
||||
trackUnmanagedFds: true,
|
||||
stdin: true,
|
||||
stdout: true,
|
||||
stderr: true,
|
||||
});
|
||||
}
|
||||
postMessage = worker.postMessage.bind(worker);
|
||||
|
||||
signalError() {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-error',
|
||||
channelId: this.id
|
||||
// Ensure we've seen this event before we terminate the worker thread, as a
|
||||
// workaround for https://github.com/nodejs/node/issues/38418.
|
||||
const starting = pEvent(worker, 'message', ({ava}) => ava && ava.type === 'starting');
|
||||
|
||||
close = async () => {
|
||||
try {
|
||||
await starting;
|
||||
await worker.terminate();
|
||||
} finally {
|
||||
// No-op
|
||||
}
|
||||
};
|
||||
} else {
|
||||
worker = childProcess.fork(fileURLToPath(workerPath), options.workerArgv, {
|
||||
cwd: options.projectDir,
|
||||
silent: true,
|
||||
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables},
|
||||
execArgv: [...execArgv, ...additionalExecArgv],
|
||||
});
|
||||
postMessage = controlFlow(worker);
|
||||
close = async () => worker.kill();
|
||||
}
|
||||
|
||||
emitMessage({messageId, replyTo, serializedData}) {
|
||||
this.emit('message', {
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
|
||||
forwardMessageToFork({messageId, replyTo, serializedData}) {
|
||||
this.sendToFork({
|
||||
type: 'shared-worker-message',
|
||||
channelId: this.id,
|
||||
messageId,
|
||||
replyTo,
|
||||
serializedData
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let forkCounter = 0;
|
||||
|
||||
module.exports = (file, options, execArgv = process.execArgv) => {
|
||||
const forkId = `fork/${++forkCounter}`;
|
||||
const sharedWorkerChannels = new Map();
|
||||
return {
|
||||
worker,
|
||||
postMessage,
|
||||
close,
|
||||
};
|
||||
};
|
||||
|
||||
export default function loadFork(file, options, execArgv = process.execArgv) {
|
||||
let finished = false;
|
||||
|
||||
const emitter = new Emittery();
|
||||
|
|
@ -75,31 +78,22 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
options = {
|
||||
baseDir: process.cwd(),
|
||||
file,
|
||||
forkId,
|
||||
...options
|
||||
...options,
|
||||
};
|
||||
|
||||
const subprocess = childProcess.fork(WORKER_PATH, options.workerArgv, {
|
||||
cwd: options.projectDir,
|
||||
silent: true,
|
||||
env: {NODE_ENV: 'test', ...process.env, ...options.environmentVariables, AVA_PATH},
|
||||
execArgv
|
||||
});
|
||||
|
||||
subprocess.stdout.on('data', chunk => {
|
||||
const {worker, postMessage, close} = createWorker(options, execArgv);
|
||||
worker.stdout.on('data', chunk => {
|
||||
emitStateChange({type: 'worker-stdout', chunk});
|
||||
});
|
||||
|
||||
subprocess.stderr.on('data', chunk => {
|
||||
worker.stderr.on('data', chunk => {
|
||||
emitStateChange({type: 'worker-stderr', chunk});
|
||||
});
|
||||
|
||||
const bufferedSend = controlFlow(subprocess);
|
||||
|
||||
let forcedExit = false;
|
||||
const send = evt => {
|
||||
if (!finished && !forcedExit) {
|
||||
bufferedSend({ava: evt});
|
||||
postMessage({ava: evt});
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -109,7 +103,7 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
resolve();
|
||||
};
|
||||
|
||||
subprocess.on('message', message => {
|
||||
worker.on('message', message => {
|
||||
if (!message.ava) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -119,15 +113,18 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
send({type: 'options', options});
|
||||
break;
|
||||
case 'shared-worker-connect': {
|
||||
const channel = new SharedWorkerChannel(message.ava, send);
|
||||
sharedWorkerChannels.set(channel.id, channel);
|
||||
emitter.emit('connectSharedWorker', channel);
|
||||
const {channelId, filename, initialData, port} = message.ava;
|
||||
emitter.emit('connectSharedWorker', {
|
||||
filename,
|
||||
initialData,
|
||||
port,
|
||||
signalError() {
|
||||
send({type: 'shared-worker-error', channelId});
|
||||
},
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'shared-worker-message':
|
||||
sharedWorkerChannels.get(message.ava.channelId).emitMessage(message.ava);
|
||||
break;
|
||||
case 'ping':
|
||||
send({type: 'pong'});
|
||||
break;
|
||||
|
|
@ -136,12 +133,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
}
|
||||
});
|
||||
|
||||
subprocess.on('error', err => {
|
||||
emitStateChange({type: 'worker-failed', err});
|
||||
worker.on('error', error => {
|
||||
emitStateChange({type: 'worker-failed', err: serializeError('Worker error', false, error, file)});
|
||||
finish();
|
||||
});
|
||||
|
||||
subprocess.on('exit', (code, signal) => {
|
||||
worker.on('exit', (code, signal) => {
|
||||
if (forcedExit) {
|
||||
emitStateChange({type: 'worker-finished', forcedExit});
|
||||
} else if (code > 0) {
|
||||
|
|
@ -158,12 +155,12 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
|
||||
return {
|
||||
file,
|
||||
forkId,
|
||||
threadId: worker.threadId,
|
||||
promise,
|
||||
|
||||
exit() {
|
||||
forcedExit = true;
|
||||
subprocess.kill();
|
||||
close();
|
||||
},
|
||||
|
||||
notifyOfPeerFailure() {
|
||||
|
|
@ -176,6 +173,6 @@ module.exports = (file, options, execArgv = process.execArgv) => {
|
|||
|
||||
onStateChange(listener) {
|
||||
return emitter.on('stateChange', listener);
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
|
|
|||
140
node_modules/ava/lib/glob-helpers.cjs
generated
vendored
Normal file
140
node_modules/ava/lib/glob-helpers.cjs
generated
vendored
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const process = require('process');
|
||||
|
||||
const ignoreByDefault = require('ignore-by-default');
|
||||
const picomatch = require('picomatch');
|
||||
const slash = require('slash');
|
||||
|
||||
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
|
||||
exports.defaultIgnorePatterns = defaultIgnorePatterns;
|
||||
|
||||
const defaultPicomatchIgnorePatterns = [
|
||||
...defaultIgnorePatterns,
|
||||
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
|
||||
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
|
||||
];
|
||||
|
||||
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
|
||||
|
||||
const matchingCache = new WeakMap();
|
||||
const processMatchingPatterns = input => {
|
||||
let result = matchingCache.get(input);
|
||||
if (!result) {
|
||||
const ignore = [...defaultPicomatchIgnorePatterns];
|
||||
const patterns = input.filter(pattern => {
|
||||
if (pattern.startsWith('!')) {
|
||||
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
|
||||
ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
result = {
|
||||
match: picomatch(patterns, {ignore}),
|
||||
matchNoIgnore: picomatch(patterns),
|
||||
individualMatchers: patterns.map(pattern => ({pattern, match: picomatch(pattern, {ignore})})),
|
||||
};
|
||||
matchingCache.set(input, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
exports.processMatchingPatterns = processMatchingPatterns;
|
||||
|
||||
const matchesIgnorePatterns = (file, patterns) => {
|
||||
const {matchNoIgnore} = processMatchingPatterns(patterns);
|
||||
return matchNoIgnore(file) || defaultMatchNoIgnore(file);
|
||||
};
|
||||
|
||||
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
|
||||
file = normalizeFileForMatching(cwd, file);
|
||||
return {
|
||||
isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
|
||||
isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns),
|
||||
};
|
||||
}
|
||||
|
||||
exports.classify = classify;
|
||||
|
||||
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
|
||||
|
||||
exports.hasExtension = hasExtension;
|
||||
|
||||
function isHelperish(file) { // Assume file has been normalized already.
|
||||
// File names starting with an underscore are deemed "helpers".
|
||||
if (path.basename(file).startsWith('_')) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// This function assumes the file has been normalized. If it couldn't be,
|
||||
// don't check if it's got a parent directory that starts with an underscore.
|
||||
// Deem it not a "helper".
|
||||
if (path.isAbsolute(file)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the file has a parent directory that starts with only a single
|
||||
// underscore, it's deemed a "helper".
|
||||
return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
|
||||
}
|
||||
|
||||
exports.isHelperish = isHelperish;
|
||||
|
||||
function matches(file, patterns) {
|
||||
const {match} = processMatchingPatterns(patterns);
|
||||
return match(file);
|
||||
}
|
||||
|
||||
exports.matches = matches;
|
||||
|
||||
function normalizeFileForMatching(cwd, file) {
|
||||
if (process.platform === 'win32') {
|
||||
cwd = slash(cwd);
|
||||
file = slash(file);
|
||||
}
|
||||
|
||||
// Note that if `file` is outside `cwd` we can't normalize it. If this turns
|
||||
// out to be a real-world scenario we may have to make changes in calling code
|
||||
// to make sure the file isn't even selected for matching.
|
||||
if (!file.startsWith(cwd)) {
|
||||
return file;
|
||||
}
|
||||
|
||||
// Assume `cwd` does *not* end in a slash.
|
||||
return file.slice(cwd.length + 1);
|
||||
}
|
||||
|
||||
exports.normalizeFileForMatching = normalizeFileForMatching;
|
||||
|
||||
function normalizePattern(pattern) {
|
||||
// Always use `/` in patterns, harmonizing matching across platforms
|
||||
if (process.platform === 'win32') {
|
||||
pattern = slash(pattern);
|
||||
}
|
||||
|
||||
if (pattern.endsWith('/')) {
|
||||
pattern = pattern.slice(0, -1);
|
||||
}
|
||||
|
||||
if (pattern.startsWith('./')) {
|
||||
return pattern.slice(2);
|
||||
}
|
||||
|
||||
if (pattern.startsWith('!./')) {
|
||||
return `!${pattern.slice(3)}`;
|
||||
}
|
||||
|
||||
return pattern;
|
||||
}
|
||||
|
||||
exports.normalizePattern = normalizePattern;
|
||||
|
||||
function normalizePatterns(patterns) {
|
||||
return patterns.map(pattern => normalizePattern(pattern));
|
||||
}
|
||||
|
||||
exports.normalizePatterns = normalizePatterns;
|
||||
305
node_modules/ava/lib/globs.js
generated
vendored
305
node_modules/ava/lib/globs.js
generated
vendored
|
|
@ -1,54 +1,36 @@
|
|||
'use strict';
|
||||
const path = require('path');
|
||||
const globby = require('globby');
|
||||
const ignoreByDefault = require('ignore-by-default');
|
||||
const picomatch = require('picomatch');
|
||||
const slash = require('slash');
|
||||
const providerManager = require('./provider-manager');
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
const defaultIgnorePatterns = [...ignoreByDefault.directories(), '**/node_modules'];
|
||||
const defaultPicomatchIgnorePatterns = [
|
||||
...defaultIgnorePatterns,
|
||||
// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
|
||||
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`)
|
||||
];
|
||||
import {globby, globbySync} from 'globby';
|
||||
|
||||
const defaultMatchNoIgnore = picomatch(defaultPicomatchIgnorePatterns);
|
||||
import {
|
||||
defaultIgnorePatterns,
|
||||
hasExtension,
|
||||
normalizeFileForMatching,
|
||||
normalizePatterns,
|
||||
processMatchingPatterns,
|
||||
} from './glob-helpers.cjs';
|
||||
|
||||
export {
|
||||
classify,
|
||||
isHelperish,
|
||||
matches,
|
||||
normalizePattern,
|
||||
defaultIgnorePatterns,
|
||||
hasExtension,
|
||||
normalizeFileForMatching,
|
||||
normalizePatterns,
|
||||
} from './glob-helpers.cjs';
|
||||
|
||||
const defaultIgnoredByWatcherPatterns = [
|
||||
'**/*.snap.md', // No need to rerun tests when the Markdown files change.
|
||||
'ava.config.js', // Config is not reloaded so avoid rerunning tests when it changes.
|
||||
'ava.config.cjs' // Config is not reloaded so avoid rerunning tests when it changes.
|
||||
'ava.config.cjs', // Config is not reloaded so avoid rerunning tests when it changes.
|
||||
];
|
||||
|
||||
const buildExtensionPattern = extensions => extensions.length === 1 ? extensions[0] : `{${extensions.join(',')}}`;
|
||||
|
||||
function normalizePattern(pattern) {
|
||||
// Always use `/` in patterns, harmonizing matching across platforms
|
||||
if (process.platform === 'win32') {
|
||||
pattern = slash(pattern);
|
||||
}
|
||||
|
||||
if (pattern.startsWith('./')) {
|
||||
return pattern.slice(2);
|
||||
}
|
||||
|
||||
if (pattern.startsWith('!./')) {
|
||||
return `!${pattern.slice(3)}`;
|
||||
}
|
||||
|
||||
return pattern;
|
||||
}
|
||||
|
||||
exports.normalizePattern = normalizePattern;
|
||||
|
||||
function normalizePatterns(patterns) {
|
||||
return patterns.map(pattern => normalizePattern(pattern));
|
||||
}
|
||||
|
||||
exports.normalizePatterns = normalizePatterns;
|
||||
|
||||
function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
|
||||
export function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: ignoredByWatcherPatterns, providers}) {
|
||||
if (filePatterns !== undefined && (!Array.isArray(filePatterns) || filePatterns.length === 0)) {
|
||||
throw new Error('The ’files’ configuration must be an array containing glob patterns.');
|
||||
}
|
||||
|
|
@ -68,7 +50,7 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
|
|||
`**/test/**/*.${extensionPattern}`,
|
||||
`**/tests/**/*.${extensionPattern}`,
|
||||
'!**/__tests__/**/__{helper,fixture}?(s)__/**/*',
|
||||
'!**/test?(s)/**/{helper,fixture}?(s)/**/*'
|
||||
'!**/test?(s)/**/{helper,fixture}?(s)/**/*',
|
||||
];
|
||||
|
||||
if (filePatterns) {
|
||||
|
|
@ -84,40 +66,36 @@ function normalizeGlobs({extensions, files: filePatterns, ignoredByWatcher: igno
|
|||
|
||||
ignoredByWatcherPatterns = ignoredByWatcherPatterns ? [...defaultIgnoredByWatcherPatterns, ...normalizePatterns(ignoredByWatcherPatterns)] : [...defaultIgnoredByWatcherPatterns];
|
||||
|
||||
for (const {level, main} of providers) {
|
||||
if (level >= providerManager.levels.pathRewrites) {
|
||||
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
|
||||
}
|
||||
for (const {main} of providers) {
|
||||
({filePatterns, ignoredByWatcherPatterns} = main.updateGlobs({filePatterns, ignoredByWatcherPatterns}));
|
||||
}
|
||||
|
||||
return {extensions, filePatterns, ignoredByWatcherPatterns};
|
||||
}
|
||||
|
||||
exports.normalizeGlobs = normalizeGlobs;
|
||||
|
||||
const hasExtension = (extensions, file) => extensions.includes(path.extname(file).slice(1));
|
||||
|
||||
exports.hasExtension = hasExtension;
|
||||
const globOptions = {
|
||||
// Globs should work relative to the cwd value only (this should be the
|
||||
// project directory that AVA is run in).
|
||||
absolute: false,
|
||||
braceExpansion: true,
|
||||
caseSensitiveMatch: false,
|
||||
dot: false,
|
||||
expandDirectories: false,
|
||||
extglob: true,
|
||||
followSymbolicLinks: true,
|
||||
gitignore: false,
|
||||
globstar: true,
|
||||
ignore: defaultIgnorePatterns,
|
||||
baseNameMatch: false,
|
||||
stats: false,
|
||||
unique: true,
|
||||
};
|
||||
|
||||
const globFiles = async (cwd, patterns) => {
|
||||
const files = await globby(patterns, {
|
||||
// Globs should work relative to the cwd value only (this should be the
|
||||
// project directory that AVA is run in).
|
||||
absolute: false,
|
||||
braceExpansion: true,
|
||||
caseSensitiveMatch: false,
|
||||
...globOptions,
|
||||
cwd,
|
||||
dot: false,
|
||||
expandDirectories: false,
|
||||
extglob: true,
|
||||
followSymbolicLinks: true,
|
||||
gitignore: false,
|
||||
globstar: true,
|
||||
ignore: defaultIgnorePatterns,
|
||||
baseNameMatch: false,
|
||||
onlyFiles: true,
|
||||
stats: false,
|
||||
unique: true
|
||||
});
|
||||
|
||||
// Return absolute file paths. This has the side-effect of normalizing paths
|
||||
|
|
@ -125,119 +103,114 @@ const globFiles = async (cwd, patterns) => {
|
|||
return files.map(file => path.join(cwd, file));
|
||||
};
|
||||
|
||||
async function findFiles({cwd, extensions, filePatterns}) {
|
||||
return (await globFiles(cwd, filePatterns)).filter(file => hasExtension(extensions, file));
|
||||
const globDirectoriesSync = (cwd, patterns) => {
|
||||
const files = globbySync(patterns, {
|
||||
...globOptions,
|
||||
cwd,
|
||||
onlyDirectories: true,
|
||||
});
|
||||
|
||||
// Return absolute file paths. This has the side-effect of normalizing paths
|
||||
// on Windows.
|
||||
return files.map(file => path.join(cwd, file));
|
||||
};
|
||||
|
||||
export async function findFiles({cwd, extensions, filePatterns}) {
|
||||
const files = await globFiles(cwd, filePatterns);
|
||||
return files.filter(file => hasExtension(extensions, file));
|
||||
}
|
||||
|
||||
exports.findFiles = findFiles;
|
||||
|
||||
async function findTests({cwd, extensions, filePatterns}) {
|
||||
return (await findFiles({cwd, extensions, filePatterns})).filter(file => !path.basename(file).startsWith('_'));
|
||||
export async function findTests({cwd, extensions, filePatterns}) {
|
||||
const files = await findFiles({cwd, extensions, filePatterns});
|
||||
return files.filter(file => !path.basename(file).startsWith('_'));
|
||||
}
|
||||
|
||||
exports.findTests = findTests;
|
||||
|
||||
function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
|
||||
export function getChokidarIgnorePatterns({ignoredByWatcherPatterns}) {
|
||||
return [
|
||||
...defaultIgnorePatterns.map(pattern => `${pattern}/**/*`),
|
||||
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!'))
|
||||
...ignoredByWatcherPatterns.filter(pattern => !pattern.startsWith('!')),
|
||||
];
|
||||
}
|
||||
|
||||
exports.getChokidarIgnorePatterns = getChokidarIgnorePatterns;
|
||||
// Cache compiled matchers per pattern array. Keyed by object identity
// (WeakMap), so repeated calls with the same `input` array reuse the
// already-compiled picomatch matchers.
const matchingCache = new WeakMap();

/**
 * Compiles an array of glob patterns into picomatch matchers.
 *
 * Negated (`!`-prefixed) patterns are split out into an ignore list rather
 * than passed through as match patterns.
 *
 * @param {string[]} input - Glob patterns; may include `!`-prefixed negations.
 * @returns {{match: Function, matchNoIgnore: Function}} `match` honors both
 *   the negated patterns and the default ignore patterns; `matchNoIgnore`
 *   applies no ignore patterns at all.
 */
const processMatchingPatterns = input => {
	let result = matchingCache.get(input);
	if (!result) {
		const ignore = [...defaultPicomatchIgnorePatterns];
		const patterns = input.filter(pattern => {
			if (pattern.startsWith('!')) {
				// Unlike globby(), picomatch needs a complete pattern when ignoring directories.
				ignore.push(pattern.slice(1), `${pattern.slice(1)}/**/*`);
				return false;
			}

			return true;
		});

		result = {
			match: picomatch(patterns, {ignore}),
			matchNoIgnore: picomatch(patterns)
		};
		matchingCache.set(input, result);
	}

	return result;
};

/**
 * Checks whether `file` matches any of `patterns`, honoring negated
 * patterns and the default ignore patterns.
 *
 * @param {string} file - A path already normalized for matching.
 * @param {string[]} patterns - Glob patterns.
 * @returns {boolean}
 */
function matches(file, patterns) {
	const {match} = processMatchingPatterns(patterns);
	return match(file);
}

exports.matches = matches;

// Like matches(), but without applying any ignore patterns; also returns
// true when the file matches `defaultMatchNoIgnore` (the default patterns,
// compiled elsewhere in this module).
const matchesIgnorePatterns = (file, patterns) => {
	const {matchNoIgnore} = processMatchingPatterns(patterns);
	return matchNoIgnore(file) || defaultMatchNoIgnore(file);
};
|
||||
|
||||
/**
 * Normalizes `file` so it can be matched against glob patterns, by making it
 * relative to `cwd` and (on Windows) converting backslashes to forward
 * slashes.
 *
 * @param {string} cwd - Base directory; assumed *not* to end in a slash.
 * @param {string} file - File path, expected to live under `cwd`.
 * @returns {string} The path relative to `cwd`, or `file` unchanged when it
 *   cannot be normalized (empty `cwd`, or `file` outside `cwd`).
 */
function normalizeFileForMatching(cwd, file) {
	// Glob patterns use forward slashes, so normalize Windows separators first.
	if (process.platform === 'win32') {
		cwd = slash(cwd);
		file = slash(file);
	}

	if (!cwd) { // TODO: Ensure tests provide an actual value.
		return file;
	}

	// TODO: If `file` is outside `cwd` we can't normalize it. Need to figure
	// out if that's a real-world scenario, but we may have to ensure the file
	// isn't even selected.
	if (!file.startsWith(cwd)) {
		return file;
	}

	// Assume `cwd` does *not* end in a slash; +1 skips the separator.
	return file.slice(cwd.length + 1);
}
|
||||
|
||||
exports.normalizeFileForMatching = normalizeFileForMatching;
|
||||
|
||||
/**
 * Determines whether a (already normalized) file path is a test "helper".
 *
 * A file is helperish when its base name starts with an underscore, or when
 * any parent directory name starts with exactly one underscore.
 *
 * @param {string} file - Normalized (cwd-relative, forward-slash) path.
 * @returns {boolean}
 */
function isHelperish(file) { // Assume file has been normalized already.
	// File names starting with an underscore are deemed "helpers".
	if (path.basename(file).startsWith('_')) {
		return true;
	}

	// This function assumes the file has been normalized. If it couldn't be,
	// don't check if it's got a parent directory that starts with an underscore.
	// Deem it not a "helper".
	if (path.isAbsolute(file)) {
		return false;
	}

	// If the file has a parent directory that starts with only a single
	// underscore, it's deemed a "helper". (A double underscore, e.g.
	// `__fixtures__`, does not count.)
	return path.dirname(file).split('/').some(dir => /^_(?:$|[^_])/.test(dir));
}
|
||||
|
||||
exports.isHelperish = isHelperish;
|
||||
|
||||
/**
 * Classifies a file against the watcher and test-selection configuration.
 *
 * @param {string} file - File path (normalized internally).
 * @param {object} options
 * @param {string} options.cwd - Base directory for normalization.
 * @param {string[]} options.extensions - Recognized test file extensions.
 * @param {string[]} options.filePatterns - Test file glob patterns.
 * @param {string[]} options.ignoredByWatcherPatterns - Watcher ignore patterns.
 * @returns {{isIgnoredByWatcher: boolean, isTest: boolean}}
 */
function classify(file, {cwd, extensions, filePatterns, ignoredByWatcherPatterns}) {
	file = normalizeFileForMatching(cwd, file);
	return {
		isIgnoredByWatcher: matchesIgnorePatterns(file, ignoredByWatcherPatterns),
		// A test must have a recognized extension, not be a helper, and match
		// at least one configured file pattern.
		isTest: hasExtension(extensions, file) && !isHelperish(file) && filePatterns.length > 0 && matches(file, filePatterns)
	};
}
|
||||
|
||||
exports.classify = classify;
|
||||
|
||||
/**
 * Filters `testFiles` down to those matching the CLI `filter` patterns.
 *
 * @param {object} options
 * @param {string} options.cwd - Base directory used to normalize paths.
 * @param {string[]} options.filter - Glob patterns selecting test files.
 * @param {string[]} options.testFiles - Candidate test file paths.
 * @returns {string[]} The matching subset of `testFiles`.
 */
function applyTestFileFilter({cwd, filter, testFiles}) {
	return testFiles.filter(file => matches(normalizeFileForMatching(cwd, file), filter));
}
|
||||
|
||||
exports.applyTestFileFilter = applyTestFileFilter;
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue