Bump tough-cookie and @azure/ms-rest-js (#1763)
* Bump tough-cookie and @azure/ms-rest-js Bumps [tough-cookie](https://github.com/salesforce/tough-cookie) and [@azure/ms-rest-js](https://github.com/Azure/ms-rest-js). These dependencies needed to be updated together. Updates `tough-cookie` from 4.0.0 to 4.1.3 - [Release notes](https://github.com/salesforce/tough-cookie/releases) - [Changelog](https://github.com/salesforce/tough-cookie/blob/master/CHANGELOG.md) - [Commits](https://github.com/salesforce/tough-cookie/compare/v4.0.0...v4.1.3) Updates `@azure/ms-rest-js` from 2.6.2 to 2.7.0 - [Changelog](https://github.com/Azure/ms-rest-js/blob/master/Changelog.md) - [Commits](https://github.com/Azure/ms-rest-js/commits) --- updated-dependencies: - dependency-name: tough-cookie dependency-type: indirect - dependency-name: "@azure/ms-rest-js" dependency-type: indirect ... Signed-off-by: dependabot[bot] <support@github.com> * Update checked-in dependencies --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
This commit is contained in:
parent
85c77f1dfc
commit
8f80d7761c
65 changed files with 4194 additions and 4573 deletions
67
node_modules/.package-lock.json
generated
vendored
67
node_modules/.package-lock.json
generated
vendored
|
|
@ -236,19 +236,6 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http/node_modules/tough-cookie": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
|
||||
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
|
||||
"dependencies": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http/node_modules/tslib": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
|
||||
|
|
@ -331,19 +318,18 @@
|
|||
"integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.2.tgz",
|
||||
"integrity": "sha512-0/8rOxAoR9M3qKUdbGOIYtHtQkm4m5jdoDNdxTU0DkOr84KwyAdJuW/RfjJinGyig4h73DNF0rdCl6XowgCYcg==",
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz",
|
||||
"integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==",
|
||||
"dependencies": {
|
||||
"@azure/core-auth": "^1.1.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tough-cookie": "^3.0.1",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
"xml2js": "^0.4.19"
|
||||
"xml2js": "^0.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js/node_modules/uuid": {
|
||||
|
|
@ -3681,14 +3667,6 @@
|
|||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/ip-regex": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz",
|
||||
"integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/irregular-plurals": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/irregular-plurals/-/irregular-plurals-3.3.0.tgz",
|
||||
|
|
@ -5006,6 +4984,11 @@
|
|||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/querystringify": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
|
||||
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
|
|
@ -5101,6 +5084,11 @@
|
|||
"node": ">=0.10.5"
|
||||
}
|
||||
},
|
||||
"node_modules/requires-port": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
|
||||
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
|
||||
},
|
||||
"node_modules/resolve": {
|
||||
"version": "1.22.1",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
|
||||
|
|
@ -5716,13 +5704,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/tough-cookie": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
|
||||
"integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==",
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
|
||||
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
|
||||
"dependencies": {
|
||||
"ip-regex": "^2.1.0",
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
|
|
@ -5851,8 +5840,9 @@
|
|||
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
|
||||
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
|
|
@ -5892,6 +5882,15 @@
|
|||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/url-parse": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.10.4",
|
||||
"integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==",
|
||||
|
|
|
|||
12
node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE
generated
vendored
12
node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE
generated
vendored
|
|
@ -1,12 +0,0 @@
|
|||
Copyright (c) 2015, Salesforce.com, Inc.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
582
node_modules/@azure/core-http/node_modules/tough-cookie/README.md
generated
vendored
582
node_modules/@azure/core-http/node_modules/tough-cookie/README.md
generated
vendored
|
|
@ -1,582 +0,0 @@
|
|||
[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js
|
||||
|
||||
[](https://nodei.co/npm/tough-cookie/)
|
||||
|
||||
[](https://travis-ci.org/salesforce/tough-cookie)
|
||||
|
||||
# Synopsis
|
||||
|
||||
``` javascript
|
||||
var tough = require('tough-cookie');
|
||||
var Cookie = tough.Cookie;
|
||||
var cookie = Cookie.parse(header);
|
||||
cookie.value = 'somethingdifferent';
|
||||
header = cookie.toString();
|
||||
|
||||
var cookiejar = new tough.CookieJar();
|
||||
cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb);
|
||||
// ...
|
||||
cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) {
|
||||
res.headers['cookie'] = cookies.join('; ');
|
||||
});
|
||||
```
|
||||
|
||||
# Installation
|
||||
|
||||
It's _so_ easy!
|
||||
|
||||
`npm install tough-cookie`
|
||||
|
||||
Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken.
|
||||
|
||||
## Version Support
|
||||
|
||||
Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module.
|
||||
|
||||
# API
|
||||
|
||||
## tough
|
||||
|
||||
Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound".
|
||||
|
||||
**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary.
|
||||
|
||||
### `parseDate(string)`
|
||||
|
||||
Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`.
|
||||
|
||||
### `formatDate(date)`
|
||||
|
||||
Format a Date into a RFC1123 string (the RFC6265-recommended format).
|
||||
|
||||
### `canonicalDomain(str)`
|
||||
|
||||
Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects).
|
||||
|
||||
### `domainMatch(str,domStr[,canonicalize=true])`
|
||||
|
||||
Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match".
|
||||
|
||||
The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not.
|
||||
|
||||
### `defaultPath(path)`
|
||||
|
||||
Given a current request/response path, gives the Path appropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC.
|
||||
|
||||
The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output.
|
||||
|
||||
### `pathMatch(reqPath,cookiePath)`
|
||||
|
||||
Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean.
|
||||
|
||||
This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`.
|
||||
|
||||
### `parse(cookieString[, options])`
|
||||
|
||||
alias for `Cookie.parse(cookieString[, options])`
|
||||
|
||||
### `fromJSON(string)`
|
||||
|
||||
alias for `Cookie.fromJSON(string)`
|
||||
|
||||
### `getPublicSuffix(hostname)`
|
||||
|
||||
Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.
|
||||
|
||||
For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`.
|
||||
|
||||
For further information, see http://publicsuffix.org/. This module derives its list from that site. This call is currently a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [get() method](https://www.npmjs.com/package/psl#pslgetdomain).
|
||||
|
||||
### `cookieCompare(a,b)`
|
||||
|
||||
For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence:
|
||||
|
||||
* Longest `.path`
|
||||
* oldest `.creation` (which has a 1ms precision, same as `Date`)
|
||||
* lowest `.creationIndex` (to get beyond the 1ms precision)
|
||||
|
||||
``` javascript
|
||||
var cookies = [ /* unsorted array of Cookie objects */ ];
|
||||
cookies = cookies.sort(cookieCompare);
|
||||
```
|
||||
|
||||
**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same millisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`.
|
||||
|
||||
### `permuteDomain(domain)`
|
||||
|
||||
Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.
|
||||
|
||||
### `permutePath(path)`
|
||||
|
||||
Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores.
|
||||
|
||||
|
||||
## Cookie
|
||||
|
||||
Exported via `tough.Cookie`.
|
||||
|
||||
### `Cookie.parse(cookieString[, options])`
|
||||
|
||||
Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed.
|
||||
|
||||
The options parameter is not required and currently has only one property:
|
||||
|
||||
* _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant.
|
||||
|
||||
If options is not an object, it is ignored, which means you can use `Array#map` with it.
|
||||
|
||||
Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response:
|
||||
|
||||
``` javascript
|
||||
if (res.headers['set-cookie'] instanceof Array)
|
||||
cookies = res.headers['set-cookie'].map(Cookie.parse);
|
||||
else
|
||||
cookies = [Cookie.parse(res.headers['set-cookie'])];
|
||||
```
|
||||
|
||||
_Note:_ in version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. This limitation has since been removed.
|
||||
See [Issue 92](https://github.com/salesforce/tough-cookie/issues/92)
|
||||
|
||||
### Properties
|
||||
|
||||
Cookie object properties:
|
||||
|
||||
* _key_ - string - the name or key of the cookie (default "")
|
||||
* _value_ - string - the value of the cookie (default "")
|
||||
* _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()`
|
||||
* _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()`
|
||||
* _domain_ - string - the `Domain=` attribute of the cookie
|
||||
* _path_ - string - the `Path=` of the cookie
|
||||
* _secure_ - boolean - the `Secure` cookie flag
|
||||
* _httpOnly_ - boolean - the `HttpOnly` cookie flag
|
||||
* _sameSite_ - string - the `SameSite` cookie attribute (from [RFC6265bis]); must be one of `none`, `lax`, or `strict`
|
||||
* _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside)
|
||||
* _creation_ - `Date` - when this cookie was constructed
|
||||
* _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation)
|
||||
|
||||
After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:
|
||||
|
||||
* _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied)
|
||||
* _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.
|
||||
* _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar
|
||||
* _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.
|
||||
|
||||
### `Cookie([{properties}])`
|
||||
|
||||
Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties.
|
||||
|
||||
### `.toString()`
|
||||
|
||||
encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.
|
||||
|
||||
### `.cookieString()`
|
||||
|
||||
encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').
|
||||
|
||||
### `.setExpires(String)`
|
||||
|
||||
sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string) is set.
|
||||
|
||||
### `.setMaxAge(number)`
|
||||
|
||||
sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly.
|
||||
|
||||
### `.expiryTime([now=Date.now()])`
|
||||
|
||||
### `.expiryDate([now=Date.now()])`
|
||||
|
||||
expiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.
|
||||
|
||||
Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute.
|
||||
|
||||
If Expires (`.expires`) is set, that's returned.
|
||||
|
||||
Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).
|
||||
|
||||
### `.TTL([now=Date.now()])`
|
||||
|
||||
compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.
|
||||
|
||||
The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.
|
||||
|
||||
### `.canonicalizedDomain()`
|
||||
|
||||
### `.cdomain()`
|
||||
|
||||
return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters.
|
||||
|
||||
### `.toJSON()`
|
||||
|
||||
For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized.
|
||||
|
||||
Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`).
|
||||
|
||||
**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
|
||||
### `Cookie.fromJSON(strOrObj)`
|
||||
|
||||
Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first.
|
||||
|
||||
Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer.
|
||||
|
||||
Returns `null` upon JSON parsing error.
|
||||
|
||||
### `.clone()`
|
||||
|
||||
Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`.
|
||||
|
||||
### `.validate()`
|
||||
|
||||
Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive.
|
||||
|
||||
validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct:
|
||||
|
||||
``` javascript
|
||||
if (cookie.validate() === true) {
|
||||
// it's tasty
|
||||
} else {
|
||||
// yuck!
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## CookieJar
|
||||
|
||||
Exported via `tough.CookieJar`.
|
||||
|
||||
### `CookieJar([store],[options])`
|
||||
|
||||
Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk"
|
||||
* _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name.
|
||||
* _prefixSecurity_ - string - default `silent` - set to `'unsafe-disabled'`, `'silent'`, or `'strict'`. See [Cookie Prefixes] below.
|
||||
* _allowSpecialUseDomain_ - boolean - default `false` - accepts special-use domain suffixes, such as `local`. Useful for testing purposes.
|
||||
This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers.
|
||||
|
||||
Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods.
|
||||
|
||||
### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))`
|
||||
|
||||
Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.
|
||||
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
|
||||
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
|
||||
* _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option.
|
||||
* _sameSiteContext_ - string - default unset - set to `'none'`, `'lax'`, or `'strict'` See [SameSite Cookies] below.
|
||||
|
||||
As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).
|
||||
|
||||
### `.setCookieSync(cookieOrString, currentUrl, [{options}])`
|
||||
|
||||
Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
|
||||
### `.getCookies(currentUrl, [{options},] cb(err,cookies))`
|
||||
|
||||
Retrieve the list of cookies that can be sent in a Cookie header for the current url.
|
||||
|
||||
If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.
|
||||
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
|
||||
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
|
||||
* _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially).
|
||||
* _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it).
|
||||
* _sameSiteContext_ - string - default unset - Set this to `'none'`, `'lax'` or `'strict'` to enforce SameSite cookies upon retrieval. See [SameSite Cookies] below.
|
||||
|
||||
The `.lastAccessed` property of the returned cookies will have been updated.
|
||||
|
||||
### `.getCookiesSync(currentUrl, [{options}])`
|
||||
|
||||
Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
|
||||
### `.getCookieString(...)`
|
||||
|
||||
Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`.
|
||||
|
||||
### `.getCookieStringSync(...)`
|
||||
|
||||
Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
|
||||
### `.getSetCookieStrings(...)`
|
||||
|
||||
Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`.
|
||||
|
||||
### `.getSetCookieStringsSync(...)`
|
||||
|
||||
Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
|
||||
### `.serialize(cb(err,serializedObject))`
|
||||
|
||||
Serialize the Jar if the underlying store supports `.getAllCookies`.
|
||||
|
||||
**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
|
||||
See [Serialization Format].
|
||||
|
||||
### `.serializeSync()`
|
||||
|
||||
Sync version of .serialize
|
||||
|
||||
### `.toJSON()`
|
||||
|
||||
Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`.
|
||||
|
||||
### `CookieJar.deserialize(serialized, [store], cb(err,object))`
|
||||
|
||||
A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.
|
||||
|
||||
The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created.
|
||||
|
||||
As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback.
|
||||
|
||||
### `CookieJar.deserializeSync(serialized, [store])`
|
||||
|
||||
Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.
|
||||
|
||||
### `CookieJar.fromJSON(string)`
|
||||
|
||||
Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.
|
||||
|
||||
### `.clone([store,]cb(err,newJar))`
|
||||
|
||||
Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.
|
||||
|
||||
The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.
|
||||
|
||||
### `.cloneSync([store])`
|
||||
|
||||
Synchronous version of `.clone`, returning a new `CookieJar` instance.
|
||||
|
||||
The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.
|
||||
|
||||
The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.
|
||||
|
||||
### `.removeAllCookies(cb(err))`
|
||||
|
||||
Removes all cookies from the jar.
|
||||
|
||||
This is a new backwards-compatible feature of `tough-cookie` version 2.5, so not all Stores will implement it efficiently. For Stores that do not implement `removeAllCookies`, the fallback is to call `removeCookie` after `getAllCookies`. If `getAllCookies` fails or isn't implemented in the Store, that error is returned. If one or more of the `removeCookie` calls fail, only the first error is returned.
|
||||
|
||||
### `.removeAllCookiesSync()`
|
||||
|
||||
Sync version of `.removeAllCookies()`
|
||||
|
||||
## Store
|
||||
|
||||
Base class for CookieJar stores. Available as `tough.Store`.
|
||||
|
||||
## Store API
|
||||
|
||||
The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores.
|
||||
|
||||
Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
|
||||
|
||||
Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing style
|
||||
|
||||
All `domain` parameters will have been normalized before calling.
|
||||
|
||||
The Cookie store must have all of the following methods.
|
||||
|
||||
### `store.findCookie(domain, path, key, cb(err,cookie))`
|
||||
|
||||
Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.
|
||||
|
||||
Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).
|
||||
|
||||
### `store.findCookies(domain, path, cb(err,cookies))`
|
||||
|
||||
Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.
|
||||
|
||||
If no cookies are found, the callback MUST be passed an empty array.
|
||||
|
||||
The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.
|
||||
|
||||
As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).
|
||||
|
||||
### `store.putCookie(cookie, cb(err))`
|
||||
|
||||
Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.
|
||||
|
||||
The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.
|
||||
|
||||
Pass an error if the cookie cannot be stored.
|
||||
|
||||
### `store.updateCookie(oldCookie, newCookie, cb(err))`
|
||||
|
||||
Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.
|
||||
|
||||
The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).
|
||||
|
||||
Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object.
|
||||
|
||||
The `newCookie` and `oldCookie` objects MUST NOT be modified.
|
||||
|
||||
Pass an error if the newCookie cannot be stored.
|
||||
|
||||
### `store.removeCookie(domain, path, key, cb(err))`
|
||||
|
||||
Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).
|
||||
|
||||
The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.
|
||||
|
||||
### `store.removeCookies(domain, path, cb(err))`
|
||||
|
||||
Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed.
|
||||
|
||||
Pass an error ONLY if removing any existing cookies failed.
|
||||
|
||||
### `store.removeAllCookies(cb(err))`
|
||||
|
||||
_Optional_. Removes all cookies from the store.
|
||||
|
||||
Pass an error if one or more cookies can't be removed.
|
||||
|
||||
**Note**: New method as of `tough-cookie` version 2.5, so not all Stores will implement this, plus some stores may choose not to implement this.
|
||||
|
||||
### `store.getAllCookies(cb(err, cookies))`
|
||||
|
||||
_Optional_. Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure.
|
||||
|
||||
Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail.
|
||||
|
||||
Pass an error if retrieval fails.
|
||||
|
||||
**Note**: not all Stores can implement this due to technical limitations, so it is optional.
|
||||
|
||||
## MemoryCookieStore
|
||||
|
||||
Inherits from `Store`.
|
||||
|
||||
A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. Supports serialization, `getAllCookies`, and `removeAllCookies`.
|
||||
|
||||
## Community Cookie Stores
|
||||
|
||||
These are some Store implementations authored and maintained by the community. They aren't official and we don't vouch for them but you may be interested to have a look:
|
||||
|
||||
- [`db-cookie-store`](https://github.com/JSBizon/db-cookie-store): SQL including SQLite-based databases
|
||||
- [`file-cookie-store`](https://github.com/JSBizon/file-cookie-store): Netscape cookie file format on disk
|
||||
- [`redis-cookie-store`](https://github.com/benkroeger/redis-cookie-store): Redis
|
||||
- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk
|
||||
- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage
|
||||
|
||||
|
||||
# Serialization Format
|
||||
|
||||
**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`.
|
||||
|
||||
```js
|
||||
{
|
||||
// The version of tough-cookie that serialized this jar.
|
||||
version: 'tough-cookie@1.x.y',
|
||||
|
||||
// add the store type, to make humans happy:
|
||||
storeType: 'MemoryCookieStore',
|
||||
|
||||
// CookieJar configuration:
|
||||
rejectPublicSuffixes: true,
|
||||
// ... future items go here
|
||||
|
||||
// Gets filled from jar.store.getAllCookies():
|
||||
cookies: [
|
||||
{
|
||||
key: 'string',
|
||||
value: 'string',
|
||||
// ...
|
||||
/* other Cookie.serializableProperties go here */
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
# RFC6265bis
|
||||
|
||||
Support for RFC6265bis revision 02 is being developed. Since this is a bit of an omnibus revision to the RFC6252, support is broken up into the functional areas.
|
||||
|
||||
## Leave Secure Cookies Alone
|
||||
|
||||
Not yet supported.
|
||||
|
||||
This change makes it so that if a cookie is sent from the server to the client with a `Secure` attribute, the channel must also be secure or the cookie is ignored.
|
||||
|
||||
## SameSite Cookies
|
||||
|
||||
Supported.
|
||||
|
||||
This change makes it possible for servers, and supporting clients, to mitigate certain types of CSRF attacks by disallowing `SameSite` cookies from being sent cross-origin.
|
||||
|
||||
On the Cookie object itself, you can get/set the `.sameSite` attribute, which will be serialized into the `SameSite=` cookie attribute. When unset or `undefined`, no `SameSite=` attribute will be serialized. The valid values of this attribute are `'none'`, `'lax'`, or `'strict'`. Other values will be serialized as-is.
|
||||
|
||||
When parsing cookies with a `SameSite` cookie attribute, values other than `'lax'` or `'strict'` are parsed as `'none'`. For example, `SomeCookie=SomeValue; SameSite=garbage` will parse so that `cookie.sameSite === 'none'`.
|
||||
|
||||
In order to support SameSite cookies, you must provide a `sameSiteContext` option to _both_ `setCookie` and `getCookies`. Valid values for this option are just like for the Cookie object, but have particular meanings:
|
||||
1. `'strict'` mode - If the request is on the same "site for cookies" (see the RFC draft for what this means), pass this option to add a layer of defense against CSRF.
|
||||
2. `'lax'` mode - If the request is from another site, _but_ is directly because of navigation by the user, e.g., `<link type=prefetch>` or `<a href="...">`, pass `sameSiteContext: 'lax'`.
|
||||
3. `'none'` - Otherwise, pass `sameSiteContext: 'none'` (this indicates a cross-origin request).
|
||||
4. unset/`undefined` - SameSite **will not** be enforced! This can be a valid use-case for when CSRF isn't in the threat model of the system being built.
|
||||
|
||||
It is highly recommended that you read RFC 6265bis for fine details on SameSite cookies. In particular [Section 8.8](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02#section-8.8) discusses security considerations and defense in depth.
|
||||
|
||||
## Cookie Prefixes
|
||||
|
||||
Supported.
|
||||
|
||||
Cookie prefixes are a way to indicate that a given cookie was set with a set of attributes simply by inspecting the first few characters of the cookie's name.
|
||||
|
||||
Cookie prefixes are defined in [Section 4.1.3 of 6265bis](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.3). Two prefixes are defined:
|
||||
|
||||
1. `"__Secure-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Secure-", then the cookie will have been set with a "Secure" attribute.
|
||||
2. `"__Host-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Host-", then the cookie will have been set with a "Secure" attribute, a "Path" attribute with a value of "/", and no "Domain" attribute.
|
||||
|
||||
If `prefixSecurity` is enabled for `CookieJar`, then cookies that match the prefixes defined above but do not obey the attribute restrictions will not be added.
|
||||
|
||||
You can define this functionality by passing in `prefixSecurity` option to `CookieJar`. It can be one of 3 values:
|
||||
|
||||
1. `silent`: Enable cookie prefix checking but silently fail to add the cookie if conditions not met. Default.
|
||||
2. `strict`: Enable cookie prefix checking and error out if conditions not met.
|
||||
3. `unsafe-disabled`: Disable cookie prefix checking.
|
||||
|
||||
Note that if `ignoreError` is passed in as `true` then the error will be silent regardless of `prefixSecurity` option (assuming it's enabled).
|
||||
|
||||
|
||||
# Copyright and License
|
||||
|
||||
BSD-3-Clause:
|
||||
|
||||
```text
|
||||
Copyright (c) 2015, Salesforce.com, Inc.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
be used to endorse or promote products derived from this software without
|
||||
specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGE.
|
||||
```
|
||||
1671
node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js
generated
vendored
1671
node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js
generated
vendored
File diff suppressed because it is too large
Load diff
190
node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js
generated
vendored
190
node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js
generated
vendored
|
|
@ -1,190 +0,0 @@
|
|||
/*!
|
||||
* Copyright (c) 2015, Salesforce.com, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* 3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
* be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
const { fromCallback } = require("universalify");
|
||||
const Store = require("./store").Store;
|
||||
const permuteDomain = require("./permuteDomain").permuteDomain;
|
||||
const pathMatch = require("./pathMatch").pathMatch;
|
||||
const util = require("util");
|
||||
|
||||
class MemoryCookieStore extends Store {
|
||||
constructor() {
|
||||
super();
|
||||
this.synchronous = true;
|
||||
this.idx = {};
|
||||
if (util.inspect.custom) {
|
||||
this[util.inspect.custom] = this.inspect;
|
||||
}
|
||||
}
|
||||
|
||||
inspect() {
|
||||
return `{ idx: ${util.inspect(this.idx, false, 2)} }`;
|
||||
}
|
||||
|
||||
findCookie(domain, path, key, cb) {
|
||||
if (!this.idx[domain]) {
|
||||
return cb(null, undefined);
|
||||
}
|
||||
if (!this.idx[domain][path]) {
|
||||
return cb(null, undefined);
|
||||
}
|
||||
return cb(null, this.idx[domain][path][key] || null);
|
||||
}
|
||||
findCookies(domain, path, allowSpecialUseDomain, cb) {
|
||||
const results = [];
|
||||
if (typeof allowSpecialUseDomain === "function") {
|
||||
cb = allowSpecialUseDomain;
|
||||
allowSpecialUseDomain = false;
|
||||
}
|
||||
if (!domain) {
|
||||
return cb(null, []);
|
||||
}
|
||||
|
||||
let pathMatcher;
|
||||
if (!path) {
|
||||
// null means "all paths"
|
||||
pathMatcher = function matchAll(domainIndex) {
|
||||
for (const curPath in domainIndex) {
|
||||
const pathIndex = domainIndex[curPath];
|
||||
for (const key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
};
|
||||
} else {
|
||||
pathMatcher = function matchRFC(domainIndex) {
|
||||
//NOTE: we should use path-match algorithm from S5.1.4 here
|
||||
//(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)
|
||||
Object.keys(domainIndex).forEach(cookiePath => {
|
||||
if (pathMatch(path, cookiePath)) {
|
||||
const pathIndex = domainIndex[cookiePath];
|
||||
for (const key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];
|
||||
const idx = this.idx;
|
||||
domains.forEach(curDomain => {
|
||||
const domainIndex = idx[curDomain];
|
||||
if (!domainIndex) {
|
||||
return;
|
||||
}
|
||||
pathMatcher(domainIndex);
|
||||
});
|
||||
|
||||
cb(null, results);
|
||||
}
|
||||
|
||||
putCookie(cookie, cb) {
|
||||
if (!this.idx[cookie.domain]) {
|
||||
this.idx[cookie.domain] = {};
|
||||
}
|
||||
if (!this.idx[cookie.domain][cookie.path]) {
|
||||
this.idx[cookie.domain][cookie.path] = {};
|
||||
}
|
||||
this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
|
||||
cb(null);
|
||||
}
|
||||
updateCookie(oldCookie, newCookie, cb) {
|
||||
// updateCookie() may avoid updating cookies that are identical. For example,
|
||||
// lastAccessed may not be important to some stores and an equality
|
||||
// comparison could exclude that field.
|
||||
this.putCookie(newCookie, cb);
|
||||
}
|
||||
removeCookie(domain, path, key, cb) {
|
||||
if (
|
||||
this.idx[domain] &&
|
||||
this.idx[domain][path] &&
|
||||
this.idx[domain][path][key]
|
||||
) {
|
||||
delete this.idx[domain][path][key];
|
||||
}
|
||||
cb(null);
|
||||
}
|
||||
removeCookies(domain, path, cb) {
|
||||
if (this.idx[domain]) {
|
||||
if (path) {
|
||||
delete this.idx[domain][path];
|
||||
} else {
|
||||
delete this.idx[domain];
|
||||
}
|
||||
}
|
||||
return cb(null);
|
||||
}
|
||||
removeAllCookies(cb) {
|
||||
this.idx = {};
|
||||
return cb(null);
|
||||
}
|
||||
getAllCookies(cb) {
|
||||
const cookies = [];
|
||||
const idx = this.idx;
|
||||
|
||||
const domains = Object.keys(idx);
|
||||
domains.forEach(domain => {
|
||||
const paths = Object.keys(idx[domain]);
|
||||
paths.forEach(path => {
|
||||
const keys = Object.keys(idx[domain][path]);
|
||||
keys.forEach(key => {
|
||||
if (key !== null) {
|
||||
cookies.push(idx[domain][path][key]);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Sort by creationIndex so deserializing retains the creation order.
|
||||
// When implementing your own store, this SHOULD retain the order too
|
||||
cookies.sort((a, b) => {
|
||||
return (a.creationIndex || 0) - (b.creationIndex || 0);
|
||||
});
|
||||
|
||||
cb(null, cookies);
|
||||
}
|
||||
}
|
||||
|
||||
[
|
||||
"findCookie",
|
||||
"findCookies",
|
||||
"putCookie",
|
||||
"updateCookie",
|
||||
"removeCookie",
|
||||
"removeCookies",
|
||||
"removeAllCookies",
|
||||
"getAllCookies"
|
||||
].forEach(name => {
|
||||
MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]);
|
||||
});
|
||||
|
||||
exports.MemoryCookieStore = MemoryCookieStore;
|
||||
61
node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js
generated
vendored
61
node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js
generated
vendored
|
|
@ -1,61 +0,0 @@
|
|||
/*!
|
||||
* Copyright (c) 2015, Salesforce.com, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* 3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
* be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
/*
|
||||
* "A request-path path-matches a given cookie-path if at least one of the
|
||||
* following conditions holds:"
|
||||
*/
|
||||
function pathMatch(reqPath, cookiePath) {
|
||||
// "o The cookie-path and the request-path are identical."
|
||||
if (cookiePath === reqPath) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const idx = reqPath.indexOf(cookiePath);
|
||||
if (idx === 0) {
|
||||
// "o The cookie-path is a prefix of the request-path, and the last
|
||||
// character of the cookie-path is %x2F ("/")."
|
||||
if (cookiePath.substr(-1) === "/") {
|
||||
return true;
|
||||
}
|
||||
|
||||
// " o The cookie-path is a prefix of the request-path, and the first
|
||||
// character of the request-path that is not included in the cookie- path
|
||||
// is a %x2F ("/") character."
|
||||
if (reqPath.substr(cookiePath.length, 1) === "/") {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
exports.pathMatch = pathMatch;
|
||||
70
node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js
generated
vendored
70
node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js
generated
vendored
|
|
@ -1,70 +0,0 @@
|
|||
/*!
|
||||
* Copyright (c) 2015, Salesforce.com, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* 3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
* be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
const pubsuffix = require("./pubsuffix-psl");
|
||||
|
||||
// Gives the permutation of all possible domainMatch()es of a given domain. The
|
||||
// array is in shortest-to-longest order. Handy for indexing.
|
||||
const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761
|
||||
function permuteDomain(domain, allowSpecialUseDomain) {
|
||||
let pubSuf = null;
|
||||
if (allowSpecialUseDomain) {
|
||||
const domainParts = domain.split(".");
|
||||
if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) {
|
||||
pubSuf = `${domainParts[domainParts.length - 2]}.${
|
||||
domainParts[domainParts.length - 1]
|
||||
}`;
|
||||
} else {
|
||||
pubSuf = pubsuffix.getPublicSuffix(domain);
|
||||
}
|
||||
} else {
|
||||
pubSuf = pubsuffix.getPublicSuffix(domain);
|
||||
}
|
||||
|
||||
if (!pubSuf) {
|
||||
return null;
|
||||
}
|
||||
if (pubSuf == domain) {
|
||||
return [domain];
|
||||
}
|
||||
|
||||
const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com"
|
||||
const parts = prefix.split(".").reverse();
|
||||
let cur = pubSuf;
|
||||
const permutations = [cur];
|
||||
while (parts.length) {
|
||||
cur = `${parts.shift()}.${cur}`;
|
||||
permutations.push(cur);
|
||||
}
|
||||
return permutations;
|
||||
}
|
||||
|
||||
exports.permuteDomain = permuteDomain;
|
||||
38
node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js
generated
vendored
38
node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js
generated
vendored
|
|
@ -1,38 +0,0 @@
|
|||
/*!
|
||||
* Copyright (c) 2018, Salesforce.com, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* 3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
* be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
const psl = require("psl");
|
||||
|
||||
function getPublicSuffix(domain) {
|
||||
return psl.get(domain);
|
||||
}
|
||||
|
||||
exports.getPublicSuffix = getPublicSuffix;
|
||||
76
node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js
generated
vendored
76
node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js
generated
vendored
|
|
@ -1,76 +0,0 @@
|
|||
/*!
|
||||
* Copyright (c) 2015, Salesforce.com, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* 3. Neither the name of Salesforce.com nor the names of its contributors may
|
||||
* be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
/*jshint unused:false */
|
||||
|
||||
class Store {
|
||||
constructor() {
|
||||
this.synchronous = false;
|
||||
}
|
||||
|
||||
findCookie(domain, path, key, cb) {
|
||||
throw new Error("findCookie is not implemented");
|
||||
}
|
||||
|
||||
findCookies(domain, path, allowSpecialUseDomain, cb) {
|
||||
throw new Error("findCookies is not implemented");
|
||||
}
|
||||
|
||||
putCookie(cookie, cb) {
|
||||
throw new Error("putCookie is not implemented");
|
||||
}
|
||||
|
||||
updateCookie(oldCookie, newCookie, cb) {
|
||||
// recommended default implementation:
|
||||
// return this.putCookie(newCookie, cb);
|
||||
throw new Error("updateCookie is not implemented");
|
||||
}
|
||||
|
||||
removeCookie(domain, path, key, cb) {
|
||||
throw new Error("removeCookie is not implemented");
|
||||
}
|
||||
|
||||
removeCookies(domain, path, cb) {
|
||||
throw new Error("removeCookies is not implemented");
|
||||
}
|
||||
|
||||
removeAllCookies(cb) {
|
||||
throw new Error("removeAllCookies is not implemented");
|
||||
}
|
||||
|
||||
getAllCookies(cb) {
|
||||
throw new Error(
|
||||
"getAllCookies is not implemented (therefore jar cannot be serialized)"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
exports.Store = Store;
|
||||
2
node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js
generated
vendored
2
node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js
generated
vendored
|
|
@ -1,2 +0,0 @@
|
|||
// generated by genversion
|
||||
module.exports = '4.0.0'
|
||||
109
node_modules/@azure/core-http/node_modules/tough-cookie/package.json
generated
vendored
109
node_modules/@azure/core-http/node_modules/tough-cookie/package.json
generated
vendored
|
|
@ -1,109 +0,0 @@
|
|||
{
|
||||
"author": {
|
||||
"name": "Jeremy Stashewsky",
|
||||
"email": "jstash@gmail.com",
|
||||
"website": "https://github.com/stash"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Ivan Nikulin",
|
||||
"website": "https://github.com/inikulin"
|
||||
},
|
||||
{
|
||||
"name": "Shivan Kaul Sahib",
|
||||
"website": "https://github.com/ShivanKaul"
|
||||
},
|
||||
{
|
||||
"name": "Clint Ruoho",
|
||||
"website": "https://github.com/ruoho"
|
||||
},
|
||||
{
|
||||
"name": "Ian Livingstone",
|
||||
"website": "https://github.com/ianlivingstone"
|
||||
},
|
||||
{
|
||||
"name": "Andrew Waterman",
|
||||
"website": "https://github.com/awaterma"
|
||||
},
|
||||
{
|
||||
"name": "Michael de Libero ",
|
||||
"website": "https://github.com/medelibero-sfdc"
|
||||
},
|
||||
{
|
||||
"name": "Jonathan Stewmon",
|
||||
"website": "https://github.com/jstewmon"
|
||||
},
|
||||
{
|
||||
"name": "Miguel Roncancio",
|
||||
"website": "https://github.com/miggs125"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Mayr",
|
||||
"website": "https://github.com/Sebmaster"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Savin",
|
||||
"website": "https://github.com/apsavin"
|
||||
},
|
||||
{
|
||||
"name": "Lalit Kapoor",
|
||||
"website": "https://github.com/lalitkapoor"
|
||||
},
|
||||
{
|
||||
"name": "Sam Thompson",
|
||||
"website": "https://github.com/sambthompson"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause",
|
||||
"name": "tough-cookie",
|
||||
"description": "RFC6265 Cookies and Cookie Jar for node.js",
|
||||
"keywords": [
|
||||
"HTTP",
|
||||
"cookie",
|
||||
"cookies",
|
||||
"set-cookie",
|
||||
"cookiejar",
|
||||
"jar",
|
||||
"RFC6265",
|
||||
"RFC2965"
|
||||
],
|
||||
"version": "4.0.0",
|
||||
"homepage": "https://github.com/salesforce/tough-cookie",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/salesforce/tough-cookie.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/salesforce/tough-cookie/issues"
|
||||
},
|
||||
"main": "./lib/cookie",
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"scripts": {
|
||||
"version": "genversion lib/version.js && git add lib/version.js",
|
||||
"test": "vows test/*_test.js",
|
||||
"cover": "nyc --reporter=lcov --reporter=html vows test/*_test.js",
|
||||
"eslint": "eslint --env node --ext .js .",
|
||||
"prettier": "prettier '**/*.{json,ts,yaml,md}'",
|
||||
"format": "npm run eslint -- --fix"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"async": "^2.6.2",
|
||||
"eslint": "^5.16.0",
|
||||
"eslint-config-prettier": "^4.2.0",
|
||||
"eslint-plugin-prettier": "^3.0.1",
|
||||
"genversion": "^2.1.0",
|
||||
"nyc": "^14.0.0",
|
||||
"prettier": "^1.17.0",
|
||||
"vows": "^0.8.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
}
|
||||
}
|
||||
15
node_modules/@azure/ms-rest-js/dist/msRest.browser.js
generated
vendored
15
node_modules/@azure/ms-rest-js/dist/msRest.browser.js
generated
vendored
|
|
@ -262,7 +262,7 @@
|
|||
* @const
|
||||
* @type {string}
|
||||
*/
|
||||
msRestVersion: "2.6.2",
|
||||
msRestVersion: "2.7.0",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*
|
||||
|
|
@ -1906,10 +1906,15 @@
|
|||
// according to the spec. There are no HTML/XSS security concerns on the usage of
|
||||
// parseFromString() here.
|
||||
var ttPolicy;
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: function (s) { return s; },
|
||||
});
|
||||
try {
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: function (s) { return s; },
|
||||
});
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"');
|
||||
}
|
||||
function parseXML(str) {
|
||||
var _a;
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map
generated
vendored
2
node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map
generated
vendored
File diff suppressed because one or more lines are too long
4
node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js
generated
vendored
4
node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js
generated
vendored
File diff suppressed because one or more lines are too long
2
node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js.map
generated
vendored
2
node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js.map
generated
vendored
File diff suppressed because one or more lines are too long
107
node_modules/@azure/ms-rest-js/dist/msRest.node.js
generated
vendored
107
node_modules/@azure/ms-rest-js/dist/msRest.node.js
generated
vendored
|
|
@ -10,7 +10,6 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau
|
|||
|
||||
var uuid = require('uuid');
|
||||
var tslib = require('tslib');
|
||||
var tough = require('tough-cookie');
|
||||
var http = require('http');
|
||||
var https = require('https');
|
||||
var node_fetch = _interopDefault(require('node-fetch'));
|
||||
|
|
@ -194,7 +193,7 @@ var Constants = {
|
|||
* @const
|
||||
* @type {string}
|
||||
*/
|
||||
msRestVersion: "2.6.2",
|
||||
msRestVersion: "2.7.0",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*
|
||||
|
|
@ -3331,9 +3330,7 @@ function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) {
|
|||
var NodeFetchHttpClient = /** @class */ (function (_super) {
|
||||
tslib.__extends(NodeFetchHttpClient, _super);
|
||||
function NodeFetchHttpClient() {
|
||||
var _this = _super !== null && _super.apply(this, arguments) || this;
|
||||
_this.cookieJar = new tough.CookieJar(undefined, { looseMode: true });
|
||||
return _this;
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
NodeFetchHttpClient.prototype.fetch = function (input, init) {
|
||||
return tslib.__awaiter(this, void 0, void 0, function () {
|
||||
|
|
@ -3344,83 +3341,43 @@ var NodeFetchHttpClient = /** @class */ (function (_super) {
|
|||
};
|
||||
NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) {
|
||||
return tslib.__awaiter(this, void 0, void 0, function () {
|
||||
var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent;
|
||||
var _this = this;
|
||||
var requestInit, _a, httpAgent, httpsAgent, tunnel, options, agent;
|
||||
return tslib.__generator(this, function (_b) {
|
||||
switch (_b.label) {
|
||||
case 0:
|
||||
requestInit = {};
|
||||
if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2];
|
||||
return [4 /*yield*/, new Promise(function (resolve, reject) {
|
||||
_this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve(cookie);
|
||||
}
|
||||
});
|
||||
})];
|
||||
case 1:
|
||||
cookieString = _b.sent();
|
||||
httpRequest.headers.set("Cookie", cookieString);
|
||||
_b.label = 2;
|
||||
case 2:
|
||||
if (httpRequest.agentSettings) {
|
||||
_a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https;
|
||||
if (httpsAgent && httpRequest.url.startsWith("https")) {
|
||||
requestInit.agent = httpsAgent;
|
||||
}
|
||||
else if (httpAgent) {
|
||||
requestInit.agent = httpAgent;
|
||||
}
|
||||
}
|
||||
else if (httpRequest.proxySettings) {
|
||||
tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);
|
||||
requestInit.agent = tunnel.agent;
|
||||
}
|
||||
if (httpRequest.keepAlive === true) {
|
||||
if (requestInit.agent) {
|
||||
requestInit.agent.keepAlive = true;
|
||||
}
|
||||
else {
|
||||
options = { keepAlive: true };
|
||||
agent = httpRequest.url.startsWith("https")
|
||||
? new https.Agent(options)
|
||||
: new http.Agent(options);
|
||||
requestInit.agent = agent;
|
||||
}
|
||||
}
|
||||
return [2 /*return*/, requestInit];
|
||||
requestInit = {};
|
||||
if (httpRequest.agentSettings) {
|
||||
_a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https;
|
||||
if (httpsAgent && httpRequest.url.startsWith("https")) {
|
||||
requestInit.agent = httpsAgent;
|
||||
}
|
||||
else if (httpAgent) {
|
||||
requestInit.agent = httpAgent;
|
||||
}
|
||||
}
|
||||
else if (httpRequest.proxySettings) {
|
||||
tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);
|
||||
requestInit.agent = tunnel.agent;
|
||||
}
|
||||
if (httpRequest.keepAlive === true) {
|
||||
if (requestInit.agent) {
|
||||
requestInit.agent.keepAlive = true;
|
||||
}
|
||||
else {
|
||||
options = { keepAlive: true };
|
||||
agent = httpRequest.url.startsWith("https")
|
||||
? new https.Agent(options)
|
||||
: new http.Agent(options);
|
||||
requestInit.agent = agent;
|
||||
}
|
||||
}
|
||||
return [2 /*return*/, requestInit];
|
||||
});
|
||||
});
|
||||
};
|
||||
NodeFetchHttpClient.prototype.processRequest = function (operationResponse) {
|
||||
NodeFetchHttpClient.prototype.processRequest = function (_operationResponse) {
|
||||
return tslib.__awaiter(this, void 0, void 0, function () {
|
||||
var setCookieHeader_1;
|
||||
var _this = this;
|
||||
return tslib.__generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
if (!this.cookieJar) return [3 /*break*/, 2];
|
||||
setCookieHeader_1 = operationResponse.headers.get("Set-Cookie");
|
||||
if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2];
|
||||
return [4 /*yield*/, new Promise(function (resolve, reject) {
|
||||
_this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
})];
|
||||
case 1:
|
||||
_a.sent();
|
||||
_a.label = 2;
|
||||
case 2: return [2 /*return*/];
|
||||
}
|
||||
/* no_op */
|
||||
return [2 /*return*/];
|
||||
});
|
||||
});
|
||||
};
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/dist/msRest.node.js.map
generated
vendored
2
node_modules/@azure/ms-rest-js/dist/msRest.node.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts
generated
vendored
3
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts
generated
vendored
|
|
@ -2,9 +2,8 @@ import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient }
|
|||
import { HttpOperationResponse } from "./httpOperationResponse";
|
||||
import { WebResourceLike } from "./webResource";
|
||||
export declare class NodeFetchHttpClient extends FetchHttpClient {
|
||||
private readonly cookieJar;
|
||||
fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise<CommonResponse>;
|
||||
prepareRequest(httpRequest: WebResourceLike): Promise<Partial<RequestInit>>;
|
||||
processRequest(operationResponse: HttpOperationResponse): Promise<void>;
|
||||
processRequest(_operationResponse: HttpOperationResponse): Promise<void>;
|
||||
}
|
||||
//# sourceMappingURL=nodeFetchHttpClient.d.ts.map
|
||||
2
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map
generated
vendored
2
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map
generated
vendored
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAQA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAGhD,qBAAa,mBAAoB,SAAQ,eAAe;IACtD,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAuD;IAE3E,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIlF,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAgD3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB9E"}
|
||||
{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAOA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAGhD,qBAAa,mBAAoB,SAAQ,eAAe;IAChD,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIlF,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAkC3E,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAI/E"}
|
||||
105
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js
generated
vendored
105
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js
generated
vendored
|
|
@ -1,7 +1,6 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
import { __awaiter, __extends, __generator } from "tslib";
|
||||
import * as tough from "tough-cookie";
|
||||
import * as http from "http";
|
||||
import * as https from "https";
|
||||
import node_fetch from "node-fetch";
|
||||
|
|
@ -10,9 +9,7 @@ import { createProxyAgent } from "./proxyAgent";
|
|||
var NodeFetchHttpClient = /** @class */ (function (_super) {
|
||||
__extends(NodeFetchHttpClient, _super);
|
||||
function NodeFetchHttpClient() {
|
||||
var _this = _super !== null && _super.apply(this, arguments) || this;
|
||||
_this.cookieJar = new tough.CookieJar(undefined, { looseMode: true });
|
||||
return _this;
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
NodeFetchHttpClient.prototype.fetch = function (input, init) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
|
|
@ -23,83 +20,43 @@ var NodeFetchHttpClient = /** @class */ (function (_super) {
|
|||
};
|
||||
NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent;
|
||||
var _this = this;
|
||||
var requestInit, _a, httpAgent, httpsAgent, tunnel, options, agent;
|
||||
return __generator(this, function (_b) {
|
||||
switch (_b.label) {
|
||||
case 0:
|
||||
requestInit = {};
|
||||
if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2];
|
||||
return [4 /*yield*/, new Promise(function (resolve, reject) {
|
||||
_this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve(cookie);
|
||||
}
|
||||
});
|
||||
})];
|
||||
case 1:
|
||||
cookieString = _b.sent();
|
||||
httpRequest.headers.set("Cookie", cookieString);
|
||||
_b.label = 2;
|
||||
case 2:
|
||||
if (httpRequest.agentSettings) {
|
||||
_a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https;
|
||||
if (httpsAgent && httpRequest.url.startsWith("https")) {
|
||||
requestInit.agent = httpsAgent;
|
||||
}
|
||||
else if (httpAgent) {
|
||||
requestInit.agent = httpAgent;
|
||||
}
|
||||
}
|
||||
else if (httpRequest.proxySettings) {
|
||||
tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);
|
||||
requestInit.agent = tunnel.agent;
|
||||
}
|
||||
if (httpRequest.keepAlive === true) {
|
||||
if (requestInit.agent) {
|
||||
requestInit.agent.keepAlive = true;
|
||||
}
|
||||
else {
|
||||
options = { keepAlive: true };
|
||||
agent = httpRequest.url.startsWith("https")
|
||||
? new https.Agent(options)
|
||||
: new http.Agent(options);
|
||||
requestInit.agent = agent;
|
||||
}
|
||||
}
|
||||
return [2 /*return*/, requestInit];
|
||||
requestInit = {};
|
||||
if (httpRequest.agentSettings) {
|
||||
_a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https;
|
||||
if (httpsAgent && httpRequest.url.startsWith("https")) {
|
||||
requestInit.agent = httpsAgent;
|
||||
}
|
||||
else if (httpAgent) {
|
||||
requestInit.agent = httpAgent;
|
||||
}
|
||||
}
|
||||
else if (httpRequest.proxySettings) {
|
||||
tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers);
|
||||
requestInit.agent = tunnel.agent;
|
||||
}
|
||||
if (httpRequest.keepAlive === true) {
|
||||
if (requestInit.agent) {
|
||||
requestInit.agent.keepAlive = true;
|
||||
}
|
||||
else {
|
||||
options = { keepAlive: true };
|
||||
agent = httpRequest.url.startsWith("https")
|
||||
? new https.Agent(options)
|
||||
: new http.Agent(options);
|
||||
requestInit.agent = agent;
|
||||
}
|
||||
}
|
||||
return [2 /*return*/, requestInit];
|
||||
});
|
||||
});
|
||||
};
|
||||
NodeFetchHttpClient.prototype.processRequest = function (operationResponse) {
|
||||
NodeFetchHttpClient.prototype.processRequest = function (_operationResponse) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var setCookieHeader_1;
|
||||
var _this = this;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
if (!this.cookieJar) return [3 /*break*/, 2];
|
||||
setCookieHeader_1 = operationResponse.headers.get("Set-Cookie");
|
||||
if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2];
|
||||
return [4 /*yield*/, new Promise(function (resolve, reject) {
|
||||
_this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
})];
|
||||
case 1:
|
||||
_a.sent();
|
||||
_a.label = 2;
|
||||
case 2: return [2 /*return*/];
|
||||
}
|
||||
/* no_op */
|
||||
return [2 /*return*/];
|
||||
});
|
||||
});
|
||||
};
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map
generated
vendored
2
node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map
generated
vendored
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAEpC,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,gBAAgB,EAAc,MAAM,cAAc,CAAC;AAE5D;IAAyC,uCAAe;IAAxD;QAAA,qEA4EC;QA3EkB,eAAS,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;;IA2EnF,CAAC;IAzEO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;;;;wBACzC,WAAW,GAA2C,EAAE,CAAC;6BAE3D,CAAA,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA,EAApD,wBAAoD;wBACjC,qBAAM,IAAI,OAAO,CAAS,UAAC,OAAO,EAAE,MAAM;gCAC7D,KAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,MAAM;oCAC3D,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,CAAC,MAAM,CAAC,CAAC;qCACjB;gCACH,CAAC,CAAC,CAAC;4BACL,CAAC,CAAC,EAAA;;wBARI,YAAY,GAAG,SAQnB;wBAEF,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;;;wBAGlD,IAAI,WAAW,CAAC,aAAa,EAAE;4BACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;4BACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gCACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;6BAChC;iCAAM,IAAI,SAAS,EAAE;gCACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;6BAC/B;yBACF;6BAAM,IAAI,WAAW,CAAC,aAAa,EAAE;4BAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;4BACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;yBAClC;wBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;4BAClC,IAAI,WAAW,CAAC,KAAK,EAAE;gCACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;6BACpC;iCAAM;gCACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;gCACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;oCAC/C,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;oCAC1B,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gCAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;6BAC3B;yBACF;wBAED,sBAAO,W
AAW,EAAC;;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,iBAAwC;;;;;;;6BACvD,IAAI,CAAC,SAAS,EAAd,wBAAc;wBACV,oBAAkB,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;6BAChE,CAAA,iBAAe,IAAI,SAAS,CAAA,EAA5B,wBAA4B;wBAC9B,qBAAM,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gCAChC,KAAI,CAAC,SAAU,CAAC,SAAS,CACvB,iBAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,UAAC,GAAG;oCACF,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,EAAE,CAAC;qCACX;gCACH,CAAC,CACF,CAAC;4BACJ,CAAC,CAAC,EAAA;;wBAbF,SAaE,CAAC;;;;;;KAGR;IACH,0BAAC;AAAD,CAAC,AA5ED,CAAyC,eAAe,GA4EvD"}
|
||||
{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAEpC,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,gBAAgB,EAAc,MAAM,cAAc,CAAC;AAE5D;IAAyC,uCAAe;IAAxD;;IA2CA,CAAC;IA1CO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;gBACzC,WAAW,GAA2C,EAAE,CAAC;gBAE/D,IAAI,WAAW,CAAC,aAAa,EAAE;oBACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;oBACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;wBACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;qBAChC;yBAAM,IAAI,SAAS,EAAE;wBACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;qBAC/B;iBACF;qBAAM,IAAI,WAAW,CAAC,aAAa,EAAE;oBAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;oBACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;iBAClC;gBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;oBAClC,IAAI,WAAW,CAAC,KAAK,EAAE;wBACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;qBACpC;yBAAM;wBACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;wBACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;4BAC/C,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;4BAC1B,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;wBAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;qBAC3B;iBACF;gBAED,sBAAO,WAAW,EAAC;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,kBAAyC;;;gBAC5D,WAAW;gBACX,sBAAO;;;KACR;IACH,0BAAC;AAAD,CAAC,AA3CD,CAAyC,eAAe,GA2CvD"}
|
||||
2
node_modules/@azure/ms-rest-js/es/lib/util/constants.js
generated
vendored
2
node_modules/@azure/ms-rest-js/es/lib/util/constants.js
generated
vendored
|
|
@ -6,7 +6,7 @@ export var Constants = {
|
|||
* @const
|
||||
* @type {string}
|
||||
*/
|
||||
msRestVersion: "2.6.2",
|
||||
msRestVersion: "2.7.0",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map
generated
vendored
2
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map
generated
vendored
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAkBA,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAUlD;AAwFD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAMlE"}
|
||||
{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAsBA,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAUlD;AAwFD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAMlE"}
|
||||
13
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js
generated
vendored
13
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js
generated
vendored
|
|
@ -9,10 +9,15 @@ var parser = new DOMParser();
|
|||
// according to the spec. There are no HTML/XSS security concerns on the usage of
|
||||
// parseFromString() here.
|
||||
var ttPolicy;
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: function (s) { return s; },
|
||||
});
|
||||
try {
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: function (s) { return s; },
|
||||
});
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"');
|
||||
}
|
||||
export function parseXML(str) {
|
||||
var _a;
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map
generated
vendored
2
node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map
generated
vendored
File diff suppressed because one or more lines are too long
40
node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts
generated
vendored
40
node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts
generated
vendored
|
|
@ -1,7 +1,6 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
|
||||
import * as tough from "tough-cookie";
|
||||
import * as http from "http";
|
||||
import * as https from "https";
|
||||
import node_fetch from "node-fetch";
|
||||
|
|
@ -17,8 +16,6 @@ import { WebResourceLike } from "./webResource";
|
|||
import { createProxyAgent, ProxyAgent } from "./proxyAgent";
|
||||
|
||||
export class NodeFetchHttpClient extends FetchHttpClient {
|
||||
private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });
|
||||
|
||||
async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise<CommonResponse> {
|
||||
return (node_fetch(input, init) as unknown) as Promise<CommonResponse>;
|
||||
}
|
||||
|
|
@ -26,20 +23,6 @@ export class NodeFetchHttpClient extends FetchHttpClient {
|
|||
async prepareRequest(httpRequest: WebResourceLike): Promise<Partial<RequestInit>> {
|
||||
const requestInit: Partial<RequestInit & { agent?: any }> = {};
|
||||
|
||||
if (this.cookieJar && !httpRequest.headers.get("Cookie")) {
|
||||
const cookieString = await new Promise<string>((resolve, reject) => {
|
||||
this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(cookie);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
httpRequest.headers.set("Cookie", cookieString);
|
||||
}
|
||||
|
||||
if (httpRequest.agentSettings) {
|
||||
const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings;
|
||||
if (httpsAgent && httpRequest.url.startsWith("https")) {
|
||||
|
|
@ -71,25 +54,8 @@ export class NodeFetchHttpClient extends FetchHttpClient {
|
|||
return requestInit;
|
||||
}
|
||||
|
||||
async processRequest(operationResponse: HttpOperationResponse): Promise<void> {
|
||||
if (this.cookieJar) {
|
||||
const setCookieHeader = operationResponse.headers.get("Set-Cookie");
|
||||
if (setCookieHeader != undefined) {
|
||||
await new Promise((resolve, reject) => {
|
||||
this.cookieJar!.setCookie(
|
||||
setCookieHeader,
|
||||
operationResponse.request.url,
|
||||
{ ignoreError: true },
|
||||
(err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
async processRequest(_operationResponse: HttpOperationResponse): Promise<void> {
|
||||
/* no_op */
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
2
node_modules/@azure/ms-rest-js/lib/util/constants.ts
generated
vendored
2
node_modules/@azure/ms-rest-js/lib/util/constants.ts
generated
vendored
|
|
@ -7,7 +7,7 @@ export const Constants = {
|
|||
* @const
|
||||
* @type {string}
|
||||
*/
|
||||
msRestVersion: "2.6.2",
|
||||
msRestVersion: "2.7.0",
|
||||
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
|
|
|
|||
12
node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts
generated
vendored
12
node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts
generated
vendored
|
|
@ -10,10 +10,14 @@ const parser = new DOMParser();
|
|||
// according to the spec. There are no HTML/XSS security concerns on the usage of
|
||||
// parseFromString() here.
|
||||
let ttPolicy: Pick<TrustedTypePolicy, "createHTML"> | undefined;
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: (s) => s,
|
||||
});
|
||||
try {
|
||||
if (typeof self.trustedTypes !== "undefined") {
|
||||
ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
|
||||
createHTML: (s: any) => s,
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn('Could not create trusted types policy "@azure/ms-rest-js#xml.browser"');
|
||||
}
|
||||
|
||||
export function parseXML(str: string): Promise<any> {
|
||||
|
|
|
|||
10
node_modules/@azure/ms-rest-js/package.json
generated
vendored
10
node_modules/@azure/ms-rest-js/package.json
generated
vendored
|
|
@ -5,7 +5,7 @@
|
|||
"email": "azsdkteam@microsoft.com",
|
||||
"url": "https://github.com/Azure/ms-rest-js"
|
||||
},
|
||||
"version": "2.6.2",
|
||||
"version": "2.7.0",
|
||||
"description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest",
|
||||
"tags": [
|
||||
"isomorphic",
|
||||
|
|
@ -55,18 +55,19 @@
|
|||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tough-cookie": "^3.0.1",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
"xml2js": "^0.4.19"
|
||||
"xml2js": "^0.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@azure/logger-js": "^1.1.0",
|
||||
"@microsoft/api-extractor": "^7.18.11",
|
||||
"@ts-common/azure-js-dev-tools": "^19.4.0",
|
||||
"@types/bluebird": "3.5.36",
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/express": "^4.17.0",
|
||||
"@types/express": "4.17.0",
|
||||
"@types/express-serve-static-core": "4.17.0",
|
||||
"@types/fetch-mock": "^7.3.1",
|
||||
"@types/form-data": "^2.2.1",
|
||||
"@types/glob": "^7.1.1",
|
||||
|
|
@ -76,7 +77,6 @@
|
|||
"@types/node-fetch": "^2.3.7",
|
||||
"@types/semver": "^6.0.1",
|
||||
"@types/sinon": "^7.0.13",
|
||||
"@types/tough-cookie": "^2.3.5",
|
||||
"@types/trusted-types": "^2.0.0",
|
||||
"@types/tunnel": "0.0.1",
|
||||
"@types/uuid": "^8.3.2",
|
||||
|
|
|
|||
24
node_modules/ip-regex/index.js
generated
vendored
24
node_modules/ip-regex/index.js
generated
vendored
|
|
@ -1,24 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const v4 = '(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])(?:\\.(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])){3}';
|
||||
|
||||
const v6seg = '[0-9a-fA-F]{1,4}';
|
||||
const v6 = `
|
||||
(
|
||||
(?:${v6seg}:){7}(?:${v6seg}|:)| // 1:2:3:4:5:6:7:: 1:2:3:4:5:6:7:8
|
||||
(?:${v6seg}:){6}(?:${v4}|:${v6seg}|:)| // 1:2:3:4:5:6:: 1:2:3:4:5:6::8 1:2:3:4:5:6::8 1:2:3:4:5:6::1.2.3.4
|
||||
(?:${v6seg}:){5}(?::${v4}|(:${v6seg}){1,2}|:)| // 1:2:3:4:5:: 1:2:3:4:5::7:8 1:2:3:4:5::8 1:2:3:4:5::7:1.2.3.4
|
||||
(?:${v6seg}:){4}(?:(:${v6seg}){0,1}:${v4}|(:${v6seg}){1,3}|:)| // 1:2:3:4:: 1:2:3:4::6:7:8 1:2:3:4::8 1:2:3:4::6:7:1.2.3.4
|
||||
(?:${v6seg}:){3}(?:(:${v6seg}){0,2}:${v4}|(:${v6seg}){1,4}|:)| // 1:2:3:: 1:2:3::5:6:7:8 1:2:3::8 1:2:3::5:6:7:1.2.3.4
|
||||
(?:${v6seg}:){2}(?:(:${v6seg}){0,3}:${v4}|(:${v6seg}){1,5}|:)| // 1:2:: 1:2::4:5:6:7:8 1:2::8 1:2::4:5:6:7:1.2.3.4
|
||||
(?:${v6seg}:){1}(?:(:${v6seg}){0,4}:${v4}|(:${v6seg}){1,6}|:)| // 1:: 1::3:4:5:6:7:8 1::8 1::3:4:5:6:7:1.2.3.4
|
||||
(?::((?::${v6seg}){0,5}:${v4}|(?::${v6seg}){1,7}|:)) // ::2:3:4:5:6:7:8 ::2:3:4:5:6:7:8 ::8 ::1.2.3.4
|
||||
)(%[0-9a-zA-Z]{1,})? // %eth0 %1
|
||||
`.replace(/\s*\/\/.*$/gm, '').replace(/\n/g, '').trim();
|
||||
|
||||
const ip = module.exports = opts => opts && opts.exact ?
|
||||
new RegExp(`(?:^${v4}$)|(?:^${v6}$)`) :
|
||||
new RegExp(`(?:${v4})|(?:${v6})`, 'g');
|
||||
|
||||
ip.v4 = opts => opts && opts.exact ? new RegExp(`^${v4}$`) : new RegExp(v4, 'g');
|
||||
ip.v6 = opts => opts && opts.exact ? new RegExp(`^${v6}$`) : new RegExp(v6, 'g');
|
||||
45
node_modules/ip-regex/package.json
generated
vendored
45
node_modules/ip-regex/package.json
generated
vendored
|
|
@ -1,45 +0,0 @@
|
|||
{
|
||||
"name": "ip-regex",
|
||||
"version": "2.1.0",
|
||||
"description": "Regular expression for matching IP addresses (IPv4 & IPv6)",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/ip-regex",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ip",
|
||||
"ipv6",
|
||||
"ipv4",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"match",
|
||||
"test",
|
||||
"find",
|
||||
"text",
|
||||
"pattern",
|
||||
"internet",
|
||||
"protocol",
|
||||
"address",
|
||||
"validate"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "*",
|
||||
"xo": "*"
|
||||
},
|
||||
"xo": {
|
||||
"esnext": true
|
||||
}
|
||||
}
|
||||
63
node_modules/ip-regex/readme.md
generated
vendored
63
node_modules/ip-regex/readme.md
generated
vendored
|
|
@ -1,63 +0,0 @@
|
|||
# ip-regex [](https://travis-ci.org/sindresorhus/ip-regex)
|
||||
|
||||
> Regular expression for matching IP addresses
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save ip-regex
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const ipRegex = require('ip-regex');
|
||||
|
||||
// Contains an IP address?
|
||||
ipRegex().test('unicorn 192.168.0.1');
|
||||
//=> true
|
||||
|
||||
// Is an IP address?
|
||||
ipRegex({exact: true}).test('unicorn 192.168.0.1');
|
||||
//=> false
|
||||
|
||||
ipRegex.v6({exact: true}).test('1:2:3:4:5:6:7:8');
|
||||
//=> true
|
||||
|
||||
'unicorn 192.168.0.1 cake 1:2:3:4:5:6:7:8 rainbow'.match(ipRegex());
|
||||
//=> ['192.168.0.1', '1:2:3:4:5:6:7:8']
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### ipRegex([options])
|
||||
|
||||
Returns a regex for matching both IPv4 and IPv6.
|
||||
|
||||
### ipRegex.v4([options])
|
||||
|
||||
Returns a regex for matching IPv4.
|
||||
|
||||
### ipRegex.v6([options])
|
||||
|
||||
Returns a regex for matching IPv6.
|
||||
|
||||
#### options.exact
|
||||
|
||||
Type: `boolean`<br>
|
||||
Default: `false` *(Matches any IP address in a string)*
|
||||
|
||||
Only match an exact string. Useful with `RegExp#test()` to check if a string is an IP address.
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
11
node_modules/ip-regex/license → node_modules/querystringify/LICENSE
generated
vendored
11
node_modules/ip-regex/license → node_modules/querystringify/LICENSE
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
@ -9,13 +9,14 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
61
node_modules/querystringify/README.md
generated
vendored
Normal file
61
node_modules/querystringify/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
# querystringify
|
||||
|
||||
[](https://www.npmjs.com/package/querystringify)[](https://travis-ci.org/unshiftio/querystringify)[](https://david-dm.org/unshiftio/querystringify)[](https://coveralls.io/r/unshiftio/querystringify?branch=master)
|
||||
|
||||
A somewhat JSON compatible interface for query string parsing. This query string
|
||||
parser is dumb, don't expect to much from it as it only wants to parse simple
|
||||
query strings. If you want to parse complex, multi level and deeply nested
|
||||
query strings then you should ask your self. WTF am I doing?
|
||||
|
||||
## Installation
|
||||
|
||||
This module is released in npm as `querystringify`. It's also compatible with
|
||||
`browserify` so it can be used on the server as well as on the client. To
|
||||
install it simply run the following command from your CLI:
|
||||
|
||||
```
|
||||
npm install --save querystringify
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
In the following examples we assume that you've already required the library as:
|
||||
|
||||
```js
|
||||
'use strict';
|
||||
|
||||
var qs = require('querystringify');
|
||||
```
|
||||
|
||||
### qs.parse()
|
||||
|
||||
The parse method transforms a given query string in to an object. Parameters
|
||||
without values are set to empty strings. It does not care if your query string
|
||||
is prefixed with a `?`, a `#`, or not prefixed. It just extracts the parts
|
||||
between the `=` and `&`:
|
||||
|
||||
```js
|
||||
qs.parse('?foo=bar'); // { foo: 'bar' }
|
||||
qs.parse('#foo=bar'); // { foo: 'bar' }
|
||||
qs.parse('foo=bar'); // { foo: 'bar' }
|
||||
qs.parse('foo=bar&bar=foo'); // { foo: 'bar', bar: 'foo' }
|
||||
qs.parse('foo&bar=foo'); // { foo: '', bar: 'foo' }
|
||||
```
|
||||
|
||||
### qs.stringify()
|
||||
|
||||
This transforms a given object in to a query string. By default we return the
|
||||
query string without a `?` prefix. If you want to prefix it by default simply
|
||||
supply `true` as second argument. If it should be prefixed by something else
|
||||
simply supply a string with the prefix value as second argument:
|
||||
|
||||
```js
|
||||
qs.stringify({ foo: bar }); // foo=bar
|
||||
qs.stringify({ foo: bar }, true); // ?foo=bar
|
||||
qs.stringify({ foo: bar }, '#'); // #foo=bar
|
||||
qs.stringify({ foo: '' }, '&'); // &foo=
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
118
node_modules/querystringify/index.js
generated
vendored
Normal file
118
node_modules/querystringify/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
'use strict';
|
||||
|
||||
var has = Object.prototype.hasOwnProperty
|
||||
, undef;
|
||||
|
||||
/**
|
||||
* Decode a URI encoded string.
|
||||
*
|
||||
* @param {String} input The URI encoded string.
|
||||
* @returns {String|Null} The decoded string.
|
||||
* @api private
|
||||
*/
|
||||
function decode(input) {
|
||||
try {
|
||||
return decodeURIComponent(input.replace(/\+/g, ' '));
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to encode a given input.
|
||||
*
|
||||
* @param {String} input The string that needs to be encoded.
|
||||
* @returns {String|Null} The encoded string.
|
||||
* @api private
|
||||
*/
|
||||
function encode(input) {
|
||||
try {
|
||||
return encodeURIComponent(input);
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple query string parser.
|
||||
*
|
||||
* @param {String} query The query string that needs to be parsed.
|
||||
* @returns {Object}
|
||||
* @api public
|
||||
*/
|
||||
function querystring(query) {
|
||||
var parser = /([^=?#&]+)=?([^&]*)/g
|
||||
, result = {}
|
||||
, part;
|
||||
|
||||
while (part = parser.exec(query)) {
|
||||
var key = decode(part[1])
|
||||
, value = decode(part[2]);
|
||||
|
||||
//
|
||||
// Prevent overriding of existing properties. This ensures that build-in
|
||||
// methods like `toString` or __proto__ are not overriden by malicious
|
||||
// querystrings.
|
||||
//
|
||||
// In the case if failed decoding, we want to omit the key/value pairs
|
||||
// from the result.
|
||||
//
|
||||
if (key === null || value === null || key in result) continue;
|
||||
result[key] = value;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform a query string to an object.
|
||||
*
|
||||
* @param {Object} obj Object that should be transformed.
|
||||
* @param {String} prefix Optional prefix.
|
||||
* @returns {String}
|
||||
* @api public
|
||||
*/
|
||||
function querystringify(obj, prefix) {
|
||||
prefix = prefix || '';
|
||||
|
||||
var pairs = []
|
||||
, value
|
||||
, key;
|
||||
|
||||
//
|
||||
// Optionally prefix with a '?' if needed
|
||||
//
|
||||
if ('string' !== typeof prefix) prefix = '?';
|
||||
|
||||
for (key in obj) {
|
||||
if (has.call(obj, key)) {
|
||||
value = obj[key];
|
||||
|
||||
//
|
||||
// Edge cases where we actually want to encode the value to an empty
|
||||
// string instead of the stringified value.
|
||||
//
|
||||
if (!value && (value === null || value === undef || isNaN(value))) {
|
||||
value = '';
|
||||
}
|
||||
|
||||
key = encode(key);
|
||||
value = encode(value);
|
||||
|
||||
//
|
||||
// If we failed to encode the strings, we should bail out as we don't
|
||||
// want to add invalid strings to the query.
|
||||
//
|
||||
if (key === null || value === null) continue;
|
||||
pairs.push(key +'='+ value);
|
||||
}
|
||||
}
|
||||
|
||||
return pairs.length ? prefix + pairs.join('&') : '';
|
||||
}
|
||||
|
||||
//
|
||||
// Expose the module.
|
||||
//
|
||||
exports.stringify = querystringify;
|
||||
exports.parse = querystring;
|
||||
38
node_modules/querystringify/package.json
generated
vendored
Normal file
38
node_modules/querystringify/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
{
|
||||
"name": "querystringify",
|
||||
"version": "2.2.0",
|
||||
"description": "Querystringify - Small, simple but powerful query string parser.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "nyc --reporter=html --reporter=text mocha test.js",
|
||||
"watch": "mocha --watch test.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/unshiftio/querystringify"
|
||||
},
|
||||
"keywords": [
|
||||
"query",
|
||||
"string",
|
||||
"query-string",
|
||||
"querystring",
|
||||
"qs",
|
||||
"stringify",
|
||||
"parse",
|
||||
"decode",
|
||||
"encode"
|
||||
],
|
||||
"author": "Arnout Kazemier",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/unshiftio/querystringify/issues"
|
||||
},
|
||||
"homepage": "https://github.com/unshiftio/querystringify",
|
||||
"devDependencies": {
|
||||
"assume": "^2.1.0",
|
||||
"coveralls": "^3.1.0",
|
||||
"mocha": "^8.1.1",
|
||||
"nyc": "^15.1.0",
|
||||
"pre-commit": "^1.2.2"
|
||||
}
|
||||
}
|
||||
2
node_modules/requires-port/.npmignore
generated
vendored
Normal file
2
node_modules/requires-port/.npmignore
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
node_modules
|
||||
coverage
|
||||
19
node_modules/requires-port/.travis.yml
generated
vendored
Normal file
19
node_modules/requires-port/.travis.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
sudo: false
|
||||
language: node_js
|
||||
node_js:
|
||||
- "4"
|
||||
- "iojs"
|
||||
- "0.12"
|
||||
- "0.10"
|
||||
script:
|
||||
- "npm run test-travis"
|
||||
after_script:
|
||||
- "npm install coveralls@2 && cat coverage/lcov.info | coveralls"
|
||||
matrix:
|
||||
fast_finish: true
|
||||
notifications:
|
||||
irc:
|
||||
channels:
|
||||
- "irc.freenode.org#unshift"
|
||||
on_success: change
|
||||
on_failure: change
|
||||
22
node_modules/requires-port/LICENSE
generated
vendored
Normal file
22
node_modules/requires-port/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
47
node_modules/requires-port/README.md
generated
vendored
Normal file
47
node_modules/requires-port/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
# requires-port
|
||||
|
||||
[](http://unshift.io)[](http://browsenpm.org/package/requires-port)[](https://travis-ci.org/unshiftio/requires-port)[](https://david-dm.org/unshiftio/requires-port)[](https://coveralls.io/r/unshiftio/requires-port?branch=master)[](http://webchat.freenode.net/?channels=unshift)
|
||||
|
||||
The module name says it all, check if a protocol requires a given port.
|
||||
|
||||
## Installation
|
||||
|
||||
This module is intended to be used with browserify or Node.js and is distributed
|
||||
in the public npm registry. To install it simply run the following command from
|
||||
your CLI:
|
||||
|
||||
```j
|
||||
npm install --save requires-port
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
The module exports it self as function and requires 2 arguments:
|
||||
|
||||
1. The port number, can be a string or number.
|
||||
2. Protocol, can be `http`, `http:` or even `https://yomoma.com`. We just split
|
||||
it at `:` and use the first result. We currently accept the following
|
||||
protocols:
|
||||
- `http`
|
||||
- `https`
|
||||
- `ws`
|
||||
- `wss`
|
||||
- `ftp`
|
||||
- `gopher`
|
||||
- `file`
|
||||
|
||||
It returns a boolean that indicates if protocol requires this port to be added
|
||||
to your URL.
|
||||
|
||||
```js
|
||||
'use strict';
|
||||
|
||||
var required = require('requires-port');
|
||||
|
||||
console.log(required('8080', 'http')) // true
|
||||
console.log(required('80', 'http')) // false
|
||||
```
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
||||
38
node_modules/requires-port/index.js
generated
vendored
Normal file
38
node_modules/requires-port/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
'use strict';
|
||||
|
||||
/**
|
||||
* Check if we're required to add a port number.
|
||||
*
|
||||
* @see https://url.spec.whatwg.org/#default-port
|
||||
* @param {Number|String} port Port number we need to check
|
||||
* @param {String} protocol Protocol we need to check against.
|
||||
* @returns {Boolean} Is it a default port for the given protocol
|
||||
* @api private
|
||||
*/
|
||||
module.exports = function required(port, protocol) {
|
||||
protocol = protocol.split(':')[0];
|
||||
port = +port;
|
||||
|
||||
if (!port) return false;
|
||||
|
||||
switch (protocol) {
|
||||
case 'http':
|
||||
case 'ws':
|
||||
return port !== 80;
|
||||
|
||||
case 'https':
|
||||
case 'wss':
|
||||
return port !== 443;
|
||||
|
||||
case 'ftp':
|
||||
return port !== 21;
|
||||
|
||||
case 'gopher':
|
||||
return port !== 70;
|
||||
|
||||
case 'file':
|
||||
return false;
|
||||
}
|
||||
|
||||
return port !== 0;
|
||||
};
|
||||
47
node_modules/requires-port/package.json
generated
vendored
Normal file
47
node_modules/requires-port/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
"name": "requires-port",
|
||||
"version": "1.0.0",
|
||||
"description": "Check if a protocol requires a certain port number to be added to an URL.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"100%": "istanbul check-coverage --statements 100 --functions 100 --lines 100 --branches 100",
|
||||
"test-travis": "istanbul cover _mocha --report lcovonly -- test.js",
|
||||
"coverage": "istanbul cover _mocha -- test.js",
|
||||
"watch": "mocha --watch test.js",
|
||||
"test": "mocha test.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/unshiftio/requires-port"
|
||||
},
|
||||
"keywords": [
|
||||
"port",
|
||||
"require",
|
||||
"http",
|
||||
"https",
|
||||
"ws",
|
||||
"wss",
|
||||
"gopher",
|
||||
"file",
|
||||
"ftp",
|
||||
"requires",
|
||||
"requried",
|
||||
"portnumber",
|
||||
"url",
|
||||
"parsing",
|
||||
"validation",
|
||||
"cows"
|
||||
],
|
||||
"author": "Arnout Kazemier",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/unshiftio/requires-port/issues"
|
||||
},
|
||||
"homepage": "https://github.com/unshiftio/requires-port",
|
||||
"devDependencies": {
|
||||
"assume": "1.3.x",
|
||||
"istanbul": "0.4.x",
|
||||
"mocha": "2.3.x",
|
||||
"pre-commit": "1.1.x"
|
||||
}
|
||||
}
|
||||
98
node_modules/requires-port/test.js
generated
vendored
Normal file
98
node_modules/requires-port/test.js
generated
vendored
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
describe('requires-port', function () {
|
||||
'use strict';
|
||||
|
||||
var assume = require('assume')
|
||||
, required = require('./');
|
||||
|
||||
it('is exported as a function', function () {
|
||||
assume(required).is.a('function');
|
||||
});
|
||||
|
||||
it('does not require empty ports', function () {
|
||||
assume(required('', 'http')).false();
|
||||
assume(required('', 'wss')).false();
|
||||
assume(required('', 'ws')).false();
|
||||
assume(required('', 'cowsack')).false();
|
||||
});
|
||||
|
||||
it('assumes true for unknown protocols',function () {
|
||||
assume(required('808', 'foo')).true();
|
||||
assume(required('80', 'bar')).true();
|
||||
});
|
||||
|
||||
it('never requires port numbers for file', function () {
|
||||
assume(required(8080, 'file')).false();
|
||||
});
|
||||
|
||||
it('does not require port 80 for http', function () {
|
||||
assume(required('80', 'http')).false();
|
||||
assume(required(80, 'http')).false();
|
||||
assume(required(80, 'http://')).false();
|
||||
assume(required(80, 'http://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'http')).true();
|
||||
assume(required(8080, 'http')).true();
|
||||
assume(required(8080, 'http://')).true();
|
||||
assume(required(8080, 'http://www.google.com')).true();
|
||||
});
|
||||
|
||||
it('does not require port 80 for ws', function () {
|
||||
assume(required('80', 'ws')).false();
|
||||
assume(required(80, 'ws')).false();
|
||||
assume(required(80, 'ws://')).false();
|
||||
assume(required(80, 'ws://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'ws')).true();
|
||||
assume(required(8080, 'ws')).true();
|
||||
assume(required(8080, 'ws://')).true();
|
||||
assume(required(8080, 'ws://www.google.com')).true();
|
||||
});
|
||||
|
||||
it('does not require port 443 for https', function () {
|
||||
assume(required('443', 'https')).false();
|
||||
assume(required(443, 'https')).false();
|
||||
assume(required(443, 'https://')).false();
|
||||
assume(required(443, 'https://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'https')).true();
|
||||
assume(required(8080, 'https')).true();
|
||||
assume(required(8080, 'https://')).true();
|
||||
assume(required(8080, 'https://www.google.com')).true();
|
||||
});
|
||||
|
||||
it('does not require port 443 for wss', function () {
|
||||
assume(required('443', 'wss')).false();
|
||||
assume(required(443, 'wss')).false();
|
||||
assume(required(443, 'wss://')).false();
|
||||
assume(required(443, 'wss://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'wss')).true();
|
||||
assume(required(8080, 'wss')).true();
|
||||
assume(required(8080, 'wss://')).true();
|
||||
assume(required(8080, 'wss://www.google.com')).true();
|
||||
});
|
||||
|
||||
it('does not require port 21 for ftp', function () {
|
||||
assume(required('21', 'ftp')).false();
|
||||
assume(required(21, 'ftp')).false();
|
||||
assume(required(21, 'ftp://')).false();
|
||||
assume(required(21, 'ftp://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'ftp')).true();
|
||||
assume(required(8080, 'ftp')).true();
|
||||
assume(required(8080, 'ftp://')).true();
|
||||
assume(required(8080, 'ftp://www.google.com')).true();
|
||||
});
|
||||
|
||||
it('does not require port 70 for gopher', function () {
|
||||
assume(required('70', 'gopher')).false();
|
||||
assume(required(70, 'gopher')).false();
|
||||
assume(required(70, 'gopher://')).false();
|
||||
assume(required(70, 'gopher://www.google.com')).false();
|
||||
|
||||
assume(required('8080', 'gopher')).true();
|
||||
assume(required(8080, 'gopher')).true();
|
||||
assume(required(8080, 'gopher://')).true();
|
||||
assume(required(8080, 'gopher://www.google.com')).true();
|
||||
});
|
||||
});
|
||||
499
node_modules/tough-cookie/README.md
generated
vendored
499
node_modules/tough-cookie/README.md
generated
vendored
|
|
@ -1,190 +1,204 @@
|
|||
[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js
|
||||
# tough-cookie
|
||||
|
||||
[RFC 6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js
|
||||
|
||||
[](https://nodei.co/npm/tough-cookie/)
|
||||
|
||||
[](https://travis-ci.org/salesforce/tough-cookie)
|
||||
[](https://travis-ci.org/salesforce/tough-cookie)
|
||||
|
||||
# Synopsis
|
||||
## Synopsis
|
||||
|
||||
``` javascript
|
||||
var tough = require('tough-cookie');
|
||||
```javascript
|
||||
var tough = require("tough-cookie");
|
||||
var Cookie = tough.Cookie;
|
||||
var cookie = Cookie.parse(header);
|
||||
cookie.value = 'somethingdifferent';
|
||||
cookie.value = "somethingdifferent";
|
||||
header = cookie.toString();
|
||||
|
||||
var cookiejar = new tough.CookieJar();
|
||||
cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb);
|
||||
// ...
|
||||
cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) {
|
||||
res.headers['cookie'] = cookies.join('; ');
|
||||
|
||||
// Asynchronous!
|
||||
var cookie = await cookiejar.setCookie(
|
||||
cookie,
|
||||
"https://currentdomain.example.com/path"
|
||||
);
|
||||
var cookies = await cookiejar.getCookies("https://example.com/otherpath");
|
||||
|
||||
// Or with callbacks!
|
||||
cookiejar.setCookie(
|
||||
cookie,
|
||||
"https://currentdomain.example.com/path",
|
||||
function (err, cookie) {
|
||||
/* ... */
|
||||
}
|
||||
);
|
||||
cookiejar.getCookies("http://example.com/otherpath", function (err, cookies) {
|
||||
/* ... */
|
||||
});
|
||||
```
|
||||
|
||||
# Installation
|
||||
Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken.
|
||||
|
||||
It's _so_ easy!
|
||||
## Installation
|
||||
|
||||
`npm install tough-cookie`
|
||||
It's _so_ easy! Install with `npm` or your preferred package manager.
|
||||
|
||||
Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken.
|
||||
```sh
|
||||
npm install tough-cookie
|
||||
```
|
||||
|
||||
## Version Support
|
||||
## Node.js Version Support
|
||||
|
||||
Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module.
|
||||
We follow the [node.js release schedule](https://github.com/nodejs/Release#release-schedule) and support all versions that are in Active LTS or Maintenance. We will always do a major release when dropping support for older versions of node, and we will do so in consultation with our community.
|
||||
|
||||
# API
|
||||
## API
|
||||
|
||||
## tough
|
||||
### tough
|
||||
|
||||
Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound".
|
||||
The top-level exports from `require('tough-cookie')` can all be used as pure functions and don't need to be bound.
|
||||
|
||||
**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary.
|
||||
#### `parseDate(string)`
|
||||
|
||||
### `parseDate(string)`
|
||||
Parse a cookie date string into a `Date`. Parses according to [RFC 6265 Section 5.1.1](https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.1), not `Date.parse()`.
|
||||
|
||||
Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`.
|
||||
#### `formatDate(date)`
|
||||
|
||||
### `formatDate(date)`
|
||||
Format a `Date` into an [RFC 822](https://datatracker.ietf.org/doc/html/rfc822#section-5) string (the RFC 6265 recommended format).
|
||||
|
||||
Format a Date into a RFC1123 string (the RFC6265-recommended format).
|
||||
#### `canonicalDomain(str)`
|
||||
|
||||
### `canonicalDomain(str)`
|
||||
Transforms a domain name into a canonical domain name. The canonical domain name is a domain name that has been trimmed, lowercased, stripped of leading dot, and optionally punycode-encoded ([Section 5.1.2 of RFC 6265](https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.2)). For the most part, this function is idempotent (calling the function with the output from a previous call returns the same output).
|
||||
|
||||
Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects).
|
||||
#### `domainMatch(str, domStr[, canonicalize=true])`
|
||||
|
||||
### `domainMatch(str,domStr[,canonicalize=true])`
|
||||
Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain name and the `domStr` is the "cookie" domain name. Matches according to [RFC 6265 Section 5.1.3](https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.3), but it helps to think of it as a "suffix match".
|
||||
|
||||
Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match".
|
||||
The `canonicalize` parameter toggles whether the domain parameters get normalized with `canonicalDomain` or not.
|
||||
|
||||
The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not.
|
||||
#### `defaultPath(path)`
|
||||
|
||||
### `defaultPath(path)`
|
||||
Given a current request/response path, gives the path appropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by [Section 5.1.4 of the RFC](https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.4).
|
||||
|
||||
Given a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC.
|
||||
The `path` parameter MUST be _only_ the pathname part of a URI (excluding the hostname, query, fragment, and so on). This is the `.pathname` property of node's `uri.parse()` output.
|
||||
|
||||
The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output.
|
||||
#### `pathMatch(reqPath, cookiePath)`
|
||||
|
||||
### `pathMatch(reqPath,cookiePath)`
|
||||
|
||||
Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean.
|
||||
Answers "does the request-path path-match a given cookie-path?" as per [RFC 6265 Section 5.1.4](https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.4). Returns a boolean.
|
||||
|
||||
This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`.
|
||||
|
||||
### `parse(cookieString[, options])`
|
||||
#### `parse(cookieString[, options])`
|
||||
|
||||
alias for `Cookie.parse(cookieString[, options])`
|
||||
Alias for [`Cookie.parse(cookieString[, options])`](#cookieparsecookiestring-options).
|
||||
|
||||
### `fromJSON(string)`
|
||||
#### `fromJSON(string)`
|
||||
|
||||
alias for `Cookie.fromJSON(string)`
|
||||
Alias for [`Cookie.fromJSON(string)`](#cookiefromjsonstrorobj).
|
||||
|
||||
### `getPublicSuffix(hostname)`
|
||||
#### `getPublicSuffix(hostname)`
|
||||
|
||||
Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.
|
||||
Returns the public suffix of this hostname. The public suffix is the shortest domain name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.
|
||||
|
||||
For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`.
|
||||
|
||||
For further information, see http://publicsuffix.org/. This module derives its list from that site. This call is currently a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [get() method](https://www.npmjs.com/package/psl#pslgetdomain).
|
||||
For further information, see the [Public Suffix List](http://publicsuffix.org/). This module derives its list from that site. This call is a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [`get` method](https://www.npmjs.com/package/psl##pslgetdomain).
|
||||
|
||||
### `cookieCompare(a,b)`
|
||||
#### `cookieCompare(a, b)`
|
||||
|
||||
For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence:
|
||||
For use with `.sort()`, sorts a list of cookies into the recommended order given in step 2 of ([RFC 6265 Section 5.4](https://datatracker.ietf.org/doc/html/rfc6265#section-5.4)). The sort algorithm is, in order of precedence:
|
||||
|
||||
* Longest `.path`
|
||||
* oldest `.creation` (which has a 1ms precision, same as `Date`)
|
||||
* lowest `.creationIndex` (to get beyond the 1ms precision)
|
||||
- Longest `.path`
|
||||
- oldest `.creation` (which has a 1-ms precision, same as `Date`)
|
||||
- lowest `.creationIndex` (to get beyond the 1-ms precision)
|
||||
|
||||
``` javascript
|
||||
var cookies = [ /* unsorted array of Cookie objects */ ];
|
||||
```javascript
|
||||
var cookies = [
|
||||
/* unsorted array of Cookie objects */
|
||||
];
|
||||
cookies = cookies.sort(cookieCompare);
|
||||
```
|
||||
|
||||
**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same millisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`.
|
||||
> **Note**: Since the JavaScript `Date` is limited to a 1-ms precision, cookies within the same millisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`, which preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore` since `Set-Cookie` headers are parsed in order, but is not so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ so that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`.
|
||||
|
||||
### `permuteDomain(domain)`
|
||||
#### `permuteDomain(domain)`
|
||||
|
||||
Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.
|
||||
Generates a list of all possible domains that `domainMatch()` the parameter. Can be handy for implementing cookie stores.
|
||||
|
||||
### `permutePath(path)`
|
||||
#### `permutePath(path)`
|
||||
|
||||
Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores.
|
||||
Generates a list of all possible paths that `pathMatch()` the parameter. Can be handy for implementing cookie stores.
|
||||
|
||||
|
||||
## Cookie
|
||||
### Cookie
|
||||
|
||||
Exported via `tough.Cookie`.
|
||||
|
||||
### `Cookie.parse(cookieString[, options])`
|
||||
#### `Cookie.parse(cookieString[, options])`
|
||||
|
||||
Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed.
|
||||
Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed.
|
||||
|
||||
The options parameter is not required and currently has only one property:
|
||||
|
||||
* _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant.
|
||||
- _loose_ - boolean - if `true` enable parsing of keyless cookies like `=abc` and `=`, which are not RFC-compliant.
|
||||
|
||||
If options is not an object, it is ignored, which means you can use `Array#map` with it.
|
||||
If options is not an object it is ignored, which means it can be used with [`Array#map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map).
|
||||
|
||||
Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response:
|
||||
To process the Set-Cookie header(s) on a node HTTP/HTTPS response:
|
||||
|
||||
``` javascript
|
||||
if (res.headers['set-cookie'] instanceof Array)
|
||||
cookies = res.headers['set-cookie'].map(Cookie.parse);
|
||||
else
|
||||
cookies = [Cookie.parse(res.headers['set-cookie'])];
|
||||
```javascript
|
||||
if (Array.isArray(res.headers["set-cookie"]))
|
||||
cookies = res.headers["set-cookie"].map(Cookie.parse);
|
||||
else cookies = [Cookie.parse(res.headers["set-cookie"])];
|
||||
```
|
||||
|
||||
_Note:_ in version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. This limitation has since been removed.
|
||||
See [Issue 92](https://github.com/salesforce/tough-cookie/issues/92)
|
||||
_Note:_ In version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. This limitation was removed in version 2.3.4.
|
||||
For more details, see [issue #92](https://github.com/salesforce/tough-cookie/issues/92).
|
||||
|
||||
### Properties
|
||||
#### Properties
|
||||
|
||||
Cookie object properties:
|
||||
|
||||
* _key_ - string - the name or key of the cookie (default "")
|
||||
* _value_ - string - the value of the cookie (default "")
|
||||
* _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()`
|
||||
* _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()`
|
||||
* _domain_ - string - the `Domain=` attribute of the cookie
|
||||
* _path_ - string - the `Path=` of the cookie
|
||||
* _secure_ - boolean - the `Secure` cookie flag
|
||||
* _httpOnly_ - boolean - the `HttpOnly` cookie flag
|
||||
* _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside)
|
||||
* _creation_ - `Date` - when this cookie was constructed
|
||||
* _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation)
|
||||
- _key_ - string - the name or key of the cookie (default `""`)
|
||||
- _value_ - string - the value of the cookie (default `""`)
|
||||
- _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()`
|
||||
- _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. Can also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()`
|
||||
- _domain_ - string - the `Domain=` attribute of the cookie
|
||||
- _path_ - string - the `Path=` of the cookie
|
||||
- _secure_ - boolean - the `Secure` cookie flag
|
||||
- _httpOnly_ - boolean - the `HttpOnly` cookie flag
|
||||
- _sameSite_ - string - the `SameSite` cookie attribute (from [RFC 6265bis](#rfc-6265bis)); must be one of `none`, `lax`, or `strict`
|
||||
- _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside)
|
||||
- _creation_ - `Date` - when this cookie was constructed
|
||||
- _creationIndex_ - number - set at construction, used to provide greater sort precision (see `cookieCompare(a,b)` for a full explanation)
|
||||
|
||||
After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:
|
||||
After a cookie has been passed through `CookieJar.setCookie()` it has the following additional attributes:
|
||||
|
||||
* _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied)
|
||||
* _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.
|
||||
* _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar
|
||||
* _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.
|
||||
- _hostOnly_ - boolean - is this a host-only cookie (that is, no Domain field was set, but was instead implied).
|
||||
- _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.
|
||||
- _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar.
|
||||
- _lastAccessed_ - `Date` - last time the cookie got accessed. Affects cookie cleaning after it is implemented. Using `cookiejar.getCookies(...)` updates this attribute.
|
||||
|
||||
### `Cookie([{properties}])`
|
||||
#### `new Cookie([properties])`
|
||||
|
||||
Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties.
|
||||
Receives an options object that can contain any of the above Cookie properties. Uses the default for unspecified properties.
|
||||
|
||||
### `.toString()`
|
||||
#### `.toString()`
|
||||
|
||||
encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.
|
||||
Encodes to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.
|
||||
|
||||
### `.cookieString()`
|
||||
#### `.cookieString()`
|
||||
|
||||
encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').
|
||||
Encodes to a Cookie header value (specifically, the `.key` and `.value` properties joined with `"="`).
|
||||
|
||||
### `.setExpires(String)`
|
||||
#### `.setExpires(string)`
|
||||
|
||||
sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string) is set.
|
||||
Sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (that is, can't parse this date string), `.expires` is set to `"Infinity"` (a string).
|
||||
|
||||
### `.setMaxAge(number)`
|
||||
#### `.setMaxAge(number)`
|
||||
|
||||
sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly.
|
||||
Sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it correctly serializes to JSON.
|
||||
|
||||
### `.expiryTime([now=Date.now()])`
|
||||
#### `.expiryDate([now=Date.now()])`
|
||||
|
||||
### `.expiryDate([now=Date.now()])`
|
||||
|
||||
expiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.
|
||||
`expiryTime()` computes the absolute unix-epoch milliseconds that this cookie expires. `expiryDate()` works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.
|
||||
|
||||
Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute.
|
||||
|
||||
|
|
@ -192,45 +206,45 @@ If Expires (`.expires`) is set, that's returned.
|
|||
|
||||
Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).
|
||||
|
||||
### `.TTL([now=Date.now()])`
|
||||
#### `.TTL([now=Date.now()])`
|
||||
|
||||
compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.
|
||||
Computes the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.
|
||||
|
||||
The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.
|
||||
`Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.
|
||||
|
||||
### `.canonicalizedDomain()`
|
||||
#### `.canonicalizedDomain()`
|
||||
|
||||
### `.cdomain()`
|
||||
#### `.cdomain()`
|
||||
|
||||
return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters.
|
||||
Returns the canonicalized `.domain` field. This is lower-cased and punycode ([RFC 3490](https://datatracker.ietf.org/doc/html/rfc3490)) encoded if the domain has any non-ASCII characters.
|
||||
|
||||
### `.toJSON()`
|
||||
#### `.toJSON()`
|
||||
|
||||
For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized.
|
||||
|
||||
Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`).
|
||||
Any `Date` properties (such as `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`).
|
||||
|
||||
**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
> **NOTE**: Custom `Cookie` properties are discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
|
||||
### `Cookie.fromJSON(strOrObj)`
|
||||
#### `Cookie.fromJSON(strOrObj)`
|
||||
|
||||
Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first.
|
||||
|
||||
Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer.
|
||||
Any `Date` properties (such as `.expires`, `.creation`, and `.lastAccessed`) are parsed via [`Date.parse`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/parse), not tough-cookie's `parseDate`, since ISO timestamps are being handled at this layer.
|
||||
|
||||
Returns `null` upon JSON parsing error.
|
||||
Returns `null` upon a JSON parsing error.
|
||||
|
||||
### `.clone()`
|
||||
#### `.clone()`
|
||||
|
||||
Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`.
|
||||
Does a deep clone of this cookie, implemented exactly as `Cookie.fromJSON(cookie.toJSON())`.
|
||||
|
||||
### `.validate()`
|
||||
#### `.validate()`
|
||||
|
||||
Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive.
|
||||
Status: _IN PROGRESS_. Works for a few things, but is by no means comprehensive.
|
||||
|
||||
validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct:
|
||||
Validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string. Future-proof with this construct:
|
||||
|
||||
``` javascript
|
||||
```javascript
|
||||
if (cookie.validate() === true) {
|
||||
// it's tasty
|
||||
} else {
|
||||
|
|
@ -238,221 +252,225 @@ if (cookie.validate() === true) {
|
|||
}
|
||||
```
|
||||
|
||||
|
||||
## CookieJar
|
||||
### CookieJar
|
||||
|
||||
Exported via `tough.CookieJar`.
|
||||
|
||||
### `CookieJar([store],[options])`
|
||||
#### `CookieJar([store][, options])`
|
||||
|
||||
Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used.
|
||||
Simply use `new CookieJar()`. If a custom store is not passed to the constructor, a [`MemoryCookieStore`](#memorycookiestore) is created and used.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk"
|
||||
* _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name.
|
||||
This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers.
|
||||
- _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk"
|
||||
- _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name.
|
||||
- _prefixSecurity_ - string - default `silent` - set to `'unsafe-disabled'`, `'silent'`, or `'strict'`. See [Cookie Prefixes](#cookie-prefixes) below.
|
||||
- _allowSpecialUseDomain_ - boolean - default `true` - accepts special-use domain suffixes, such as `local`. Useful for testing purposes.
|
||||
This is not in the standard, but is used sometimes on the web and is accepted by most browsers.
|
||||
|
||||
Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods.
|
||||
#### `.setCookie(cookieOrString, currentUrl[, options][, callback(err, cookie)])`
|
||||
|
||||
### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))`
|
||||
|
||||
Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties.
|
||||
Attempt to set the cookie in the cookie jar. The cookie has updated `.creation`, `.lastAccessed` and `.hostOnly` properties. Returns a promise if a callback is not provided.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.
|
||||
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
|
||||
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
|
||||
* _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option.
|
||||
- _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects `HttpOnly` cookies.
|
||||
- _secure_ - boolean - autodetect from URL - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` this defaults to `true`, otherwise `false`.
|
||||
- _now_ - Date - default `new Date()` - what to use for the creation or access time of cookies.
|
||||
- _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option.
|
||||
- _sameSiteContext_ - string - default unset - set to `'none'`, `'lax'`, or `'strict'`. See [SameSite Cookies](#samesite-cookies) below.
|
||||
|
||||
As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).
|
||||
As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).
|
||||
|
||||
### `.setCookieSync(cookieOrString, currentUrl, [{options}])`
|
||||
#### `.setCookieSync(cookieOrString, currentUrl[, options])`
|
||||
|
||||
Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
Synchronous version of [`setCookie`](#setcookiecookieorstring-currenturl-options-callbackerr-cookie); only works with synchronous stores (that is, the default `MemoryCookieStore`).
|
||||
|
||||
### `.getCookies(currentUrl, [{options},] cb(err,cookies))`
|
||||
#### `.getCookies(currentUrl[, options][, callback(err, cookies)])`
|
||||
|
||||
Retrieve the list of cookies that can be sent in a Cookie header for the current url.
|
||||
Retrieve the list of cookies that can be sent in a Cookie header for the current URL. Returns a promise if a callback is not provided.
|
||||
|
||||
If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given.
|
||||
Returns an array of `Cookie` objects, sorted by default using [`cookieCompare`](#cookiecomparea-b).
|
||||
|
||||
If an error is encountered it's passed as `err` to the callback, otherwise an array of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given.
|
||||
|
||||
The `options` object can be omitted and can have the following properties:
|
||||
|
||||
* _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.
|
||||
* _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
|
||||
* _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies
|
||||
* _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially).
|
||||
* _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it).
|
||||
- _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects `HttpOnly` cookies.
|
||||
- _secure_ - boolean - autodetect from URL - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.
|
||||
- _now_ - Date - default `new Date()` - what to use for the creation or access time of cookies
|
||||
- _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` returns expired cookies and does **not** remove them from the store (which is potentially useful for replaying Set-Cookie headers).
|
||||
- _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it).
|
||||
- _sameSiteContext_ - string - default unset - Set this to `'none'`, `'lax'`, or `'strict'` to enforce SameSite cookies upon retrieval. See [SameSite Cookies](#samesite-cookies) below.
|
||||
- _sort_ - boolean - whether to sort the list of cookies.
|
||||
|
||||
The `.lastAccessed` property of the returned cookies will have been updated.
|
||||
|
||||
### `.getCookiesSync(currentUrl, [{options}])`
|
||||
#### `.getCookiesSync(currentUrl, [{options}])`
|
||||
|
||||
Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
Synchronous version of [`getCookies`](#getcookiescurrenturl-options-callbackerr-cookies); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
### `.getCookieString(...)`
|
||||
#### `.getCookieString(...)`
|
||||
|
||||
Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`.
|
||||
Accepts the same options as [`.getCookies()`](#getcookiescurrenturl-options-callbackerr-cookies) but returns a string suitable for a Cookie header rather than an Array.
|
||||
|
||||
### `.getCookieStringSync(...)`
|
||||
#### `.getCookieStringSync(...)`
|
||||
|
||||
Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
Synchronous version of [`getCookieString`](#getcookiestring); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
### `.getSetCookieStrings(...)`
|
||||
#### `.getSetCookieStrings(...)`
|
||||
|
||||
Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`.
|
||||
Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as [`.getCookies()`](#getcookiescurrenturl-options-callbackerr-cookies). Simply maps the cookie array via `.toString()`.
|
||||
|
||||
### `.getSetCookieStringsSync(...)`
|
||||
#### `.getSetCookieStringsSync(...)`
|
||||
|
||||
Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).
|
||||
Synchronous version of [`getSetCookieStrings`](#getsetcookiestrings); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
### `.serialize(cb(err,serializedObject))`
|
||||
#### `.serialize([callback(err, serializedObject)])`
|
||||
|
||||
Returns a promise if a callback is not provided.
|
||||
|
||||
Serialize the Jar if the underlying store supports `.getAllCookies`.
|
||||
|
||||
**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
> **NOTE**: Custom `Cookie` properties are discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
|
||||
|
||||
See [Serialization Format].
|
||||
See [Serialization Format](#serialization-format).
|
||||
|
||||
### `.serializeSync()`
|
||||
#### `.serializeSync()`
|
||||
|
||||
Sync version of .serialize
|
||||
Synchronous version of [`serialize`](#serializecallbackerr-serializedobject); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
### `.toJSON()`
|
||||
#### `.toJSON()`
|
||||
|
||||
Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`.
|
||||
Alias of [`.serializeSync()`](#serializesync) for the convenience of `JSON.stringify(cookiejar)`.
|
||||
|
||||
### `CookieJar.deserialize(serialized, [store], cb(err,object))`
|
||||
#### `CookieJar.deserialize(serialized[, store][, callback(err, object)])`
|
||||
|
||||
A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.
|
||||
A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization. A promise is returned if a callback is not provided.
|
||||
|
||||
The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created.
|
||||
|
||||
As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback.
|
||||
As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first.
|
||||
|
||||
### `CookieJar.deserializeSync(serialized, [store])`
|
||||
#### `CookieJar.deserializeSync(serialized[, store])`
|
||||
|
||||
Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.
|
||||
Sync version of [`.deserialize`](#cookiejardeserializeserialized-store-callbackerr-object); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
### `CookieJar.fromJSON(string)`
|
||||
#### `CookieJar.fromJSON(string)`
|
||||
|
||||
Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.
|
||||
Alias of [`.deserializeSync`](#cookiejardeserializesyncserialized-store) to provide consistency with [`Cookie.fromJSON()`](#cookiefromjsonstrorobj).
|
||||
|
||||
### `.clone([store,]cb(err,newJar))`
|
||||
#### `.clone([store][, callback(err, cloned)])`
|
||||
|
||||
Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.
|
||||
Produces a deep clone of this jar. Modifications to the original do not affect the clone, and vice versa. Returns a promise if a callback is not provided.
|
||||
|
||||
The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.
|
||||
|
||||
### `.cloneSync([store])`
|
||||
#### `.cloneSync([store])`
|
||||
|
||||
Synchronous version of `.clone`, returning a new `CookieJar` instance.
|
||||
Synchronous version of [`.clone`](#clonestore-callbackerr-cloned), returning a new `CookieJar` instance.
|
||||
|
||||
The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.
|
||||
|
||||
The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.
|
||||
|
||||
### `.removeAllCookies(cb(err))`
|
||||
#### `.removeAllCookies([callback(err)])`
|
||||
|
||||
Removes all cookies from the jar.
|
||||
Removes all cookies from the jar. Returns a promise if a callback is not provided.
|
||||
|
||||
This is a new backwards-compatible feature of `tough-cookie` version 2.5, so not all Stores will implement it efficiently. For Stores that do not implement `removeAllCookies`, the fallback is to call `removeCookie` after `getAllCookies`. If `getAllCookies` fails or isn't implemented in the Store, that error is returned. If one or more of the `removeCookie` calls fail, only the first error is returned.
|
||||
|
||||
### `.removeAllCookiesSync()`
|
||||
#### `.removeAllCookiesSync()`
|
||||
|
||||
Sync version of `.removeAllCookies()`
|
||||
Sync version of [`.removeAllCookies()`](#removeallcookiescallbackerr); only works with synchronous stores (for example, the default `MemoryCookieStore`).
|
||||
|
||||
## Store
|
||||
### Store
|
||||
|
||||
Base class for CookieJar stores. Available as `tough.Store`.
|
||||
|
||||
## Store API
|
||||
### Store API
|
||||
|
||||
The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores.
|
||||
The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in [`lib/memstore.js`](https://github.com/salesforce/tough-cookie/blob/master/lib/memstore.js). The API uses continuation-passing-style to allow for asynchronous stores.
|
||||
|
||||
Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
|
||||
Stores should inherit from the base `Store` class, which is available as a top-level export.
|
||||
|
||||
Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods on the of the containing `CookieJar` can be used (however, the continuation-passing style
|
||||
Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used.
|
||||
|
||||
All `domain` parameters will have been normalized before calling.
|
||||
All `domain` parameters are normalized before calling.
|
||||
|
||||
The Cookie store must have all of the following methods.
|
||||
The Cookie store must have all of the following methods. Note that asynchronous implementations **must** support callback parameters.
|
||||
|
||||
### `store.findCookie(domain, path, key, cb(err,cookie))`
|
||||
#### `store.findCookie(domain, path, key, callback(err, cookie))`
|
||||
|
||||
Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.
|
||||
Retrieve a cookie with the given domain, path, and key (name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest or newest such cookie should be returned.
|
||||
|
||||
Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).
|
||||
Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (that is, not an error).
|
||||
|
||||
### `store.findCookies(domain, path, cb(err,cookies))`
|
||||
#### `store.findCookies(domain, path, allowSpecialUseDomain, callback(err, cookies))`
|
||||
|
||||
Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.
|
||||
Locates cookies matching the given domain and path. This is most often called in the context of [`cookiejar.getCookies()`](#getcookiescurrenturl-options-callbackerr-cookies).
|
||||
|
||||
If no cookies are found, the callback MUST be passed an empty array.
|
||||
|
||||
The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.
|
||||
The resulting list is checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, and so on), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.
|
||||
|
||||
As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).
|
||||
As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above causes the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (that is, domain-matching only).
|
||||
|
||||
### `store.putCookie(cookie, cb(err))`
|
||||
#### `store.putCookie(cookie, callback(err))`
|
||||
|
||||
Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.
|
||||
Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties. Depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.
|
||||
|
||||
The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.
|
||||
The `cookie` object MUST NOT be modified; as the caller has already updated the `.creation` and `.lastAccessed` properties.
|
||||
|
||||
Pass an error if the cookie cannot be stored.
|
||||
|
||||
### `store.updateCookie(oldCookie, newCookie, cb(err))`
|
||||
#### `store.updateCookie(oldCookie, newCookie, callback(err))`
|
||||
|
||||
Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.
|
||||
Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path`, and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.
|
||||
|
||||
The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).
|
||||
The `.lastAccessed` property is always different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (for example, least-recently-used, which is up to the store to implement).
|
||||
|
||||
Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object.
|
||||
Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method, a stub that calls [`putCookie`](#storeputcookiecookie-callbackerr) is added to the store object.
|
||||
|
||||
The `newCookie` and `oldCookie` objects MUST NOT be modified.
|
||||
|
||||
Pass an error if the newCookie cannot be stored.
|
||||
|
||||
### `store.removeCookie(domain, path, key, cb(err))`
|
||||
#### `store.removeCookie(domain, path, key, callback(err))`
|
||||
|
||||
Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).
|
||||
Remove a cookie from the store (see notes on [`findCookie`](#storefindcookiedomain-path-key-callbackerr-cookie) about the uniqueness constraint).
|
||||
|
||||
The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.
|
||||
The implementation MUST NOT pass an error if the cookie doesn't exist, and only pass an error due to the failure to remove an existing cookie.
|
||||
|
||||
### `store.removeCookies(domain, path, cb(err))`
|
||||
#### `store.removeCookies(domain, path, callback(err))`
|
||||
|
||||
Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed.
|
||||
Removes matching cookies from the store. The `path` parameter is optional and if missing, means all paths in a domain should be removed.
|
||||
|
||||
Pass an error ONLY if removing any existing cookies failed.
|
||||
|
||||
### `store.removeAllCookies(cb(err))`
|
||||
#### `store.removeAllCookies(callback(err))`
|
||||
|
||||
_Optional_. Removes all cookies from the store.
|
||||
|
||||
Pass an error if one or more cookies can't be removed.
|
||||
|
||||
**Note**: New method as of `tough-cookie` version 2.5, so not all Stores will implement this, plus some stores may choose not to implement this.
|
||||
#### `store.getAllCookies(callback(err, cookies))`
|
||||
|
||||
### `store.getAllCookies(cb(err, cookies))`
|
||||
_Optional_. Produces an `Array` of all cookies during [`jar.serialize()`](#serializecallbackerr-serializedobject). The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format](#serialization-format) data structure.
|
||||
|
||||
_Optional_. Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure.
|
||||
|
||||
Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail.
|
||||
Cookies SHOULD be returned in creation order to preserve sorting via [`compareCookie()`](#cookiecomparea-b). For reference, `MemoryCookieStore` sorts by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1-ms. See `cookieCompare` for more detail.
|
||||
|
||||
Pass an error if retrieval fails.
|
||||
|
||||
**Note**: not all Stores can implement this due to technical limitations, so it is optional.
|
||||
**Note**: Not all Stores can implement this due to technical limitations, so it is optional.
|
||||
|
||||
## MemoryCookieStore
|
||||
### MemoryCookieStore
|
||||
|
||||
Inherits from `Store`.
|
||||
|
||||
A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. Supports serialization, `getAllCookies`, and `removeAllCookies`.
|
||||
|
||||
## Community Cookie Stores
|
||||
### Community Cookie Stores
|
||||
|
||||
These are some Store implementations authored and maintained by the community. They aren't official and we don't vouch for them but you may be interested to have a look:
|
||||
|
||||
|
|
@ -462,10 +480,9 @@ These are some Store implementations authored and maintained by the community. T
|
|||
- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk
|
||||
- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage
|
||||
|
||||
## Serialization Format
|
||||
|
||||
# Serialization Format
|
||||
|
||||
**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`.
|
||||
**NOTE**: If you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`.
|
||||
|
||||
```js
|
||||
{
|
||||
|
|
@ -491,7 +508,59 @@ These are some Store implementations authored and maintained by the community. T
|
|||
}
|
||||
```
|
||||
|
||||
# Copyright and License
|
||||
## RFC 6265bis
|
||||
|
||||
Support for RFC 6265bis revision 02 is being developed. Since this is a bit of an omnibus revision to the RFC 6252, support is broken up into the functional areas.
|
||||
|
||||
### Leave Secure Cookies Alone
|
||||
|
||||
Not yet supported.
|
||||
|
||||
This change makes it so that if a cookie is sent from the server to the client with a `Secure` attribute, the channel must also be secure or the cookie is ignored.
|
||||
|
||||
### SameSite Cookies
|
||||
|
||||
Supported.
|
||||
|
||||
This change makes it possible for servers, and supporting clients, to mitigate certain types of CSRF attacks by disallowing `SameSite` cookies from being sent cross-origin.
|
||||
|
||||
On the Cookie object itself, you can get or set the `.sameSite` attribute, which is serialized into the `SameSite=` cookie attribute. When unset or `undefined`, no `SameSite=` attribute is serialized. The valid values of this attribute are `'none'`, `'lax'`, or `'strict'`. Other values are serialized as-is.
|
||||
|
||||
When parsing cookies with a `SameSite` cookie attribute, values other than `'lax'` or `'strict'` are parsed as `'none'`. For example, `SomeCookie=SomeValue; SameSite=garbage` parses so that `cookie.sameSite === 'none'`.
|
||||
|
||||
In order to support SameSite cookies, you must provide a `sameSiteContext` option to _both_ `setCookie` and `getCookies`. Valid values for this option are just like for the Cookie object, but have particular meanings:
|
||||
|
||||
1. `'strict'` mode - If the request is on the same "site for cookies" (see the RFC draft for more information), pass this option to add a layer of defense against CSRF.
|
||||
2. `'lax'` mode - If the request is from another site, _but_ is directly because of navigation by the user, such as, `<link type=prefetch>` or `<a href="...">`, pass `sameSiteContext: 'lax'`.
|
||||
3. `'none'` - Otherwise, pass `sameSiteContext: 'none'` (this indicates a cross-origin request).
|
||||
4. unset/`undefined` - SameSite **is not** be enforced! This can be a valid use-case for when CSRF isn't in the threat model of the system being built.
|
||||
|
||||
It is highly recommended that you read RFC 6265bis for fine details on SameSite cookies. In particular [Section 8.8](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02##section-8.8) discusses security considerations and defense in depth.
|
||||
|
||||
### Cookie Prefixes
|
||||
|
||||
Supported.
|
||||
|
||||
Cookie prefixes are a way to indicate that a given cookie was set with a set of attributes simply by inspecting the first few characters of the cookie's name.
|
||||
|
||||
Cookie prefixes are defined in [Section 4.1.3 of 6265bis](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03##section-4.1.3).
|
||||
|
||||
Two prefixes are defined:
|
||||
|
||||
1. `"__Secure-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "\_\_Secure-", then the cookie was set with a "Secure" attribute.
|
||||
2. `"__Host-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "\_\_Host-", then the cookie was set with a "Secure" attribute, a "Path" attribute with a value of "/", and no "Domain" attribute.
|
||||
|
||||
If `prefixSecurity` is enabled for `CookieJar`, then cookies that match the prefixes defined above but do not obey the attribute restrictions are not added.
|
||||
|
||||
You can define this functionality by passing in the `prefixSecurity` option to `CookieJar`. It can be one of 3 values:
|
||||
|
||||
1. `silent`: Enable cookie prefix checking but silently fail to add the cookie if conditions are not met. Default.
|
||||
2. `strict`: Enable cookie prefix checking and error out if conditions are not met.
|
||||
3. `unsafe-disabled`: Disable cookie prefix checking.
|
||||
|
||||
Note that if `ignoreError` is passed in as `true` then the error is silent regardless of the `prefixSecurity` option (assuming it's enabled).
|
||||
|
||||
## Copyright and License
|
||||
|
||||
BSD-3-Clause:
|
||||
|
||||
|
|
|
|||
2098
node_modules/tough-cookie/lib/cookie.js
generated
vendored
2098
node_modules/tough-cookie/lib/cookie.js
generated
vendored
File diff suppressed because it is too large
Load diff
331
node_modules/tough-cookie/lib/memstore.js
generated
vendored
331
node_modules/tough-cookie/lib/memstore.js
generated
vendored
|
|
@ -28,154 +28,215 @@
|
|||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
'use strict';
|
||||
var Store = require('./store').Store;
|
||||
var permuteDomain = require('./permuteDomain').permuteDomain;
|
||||
var pathMatch = require('./pathMatch').pathMatch;
|
||||
var util = require('util');
|
||||
"use strict";
|
||||
const { fromCallback } = require("universalify");
|
||||
const Store = require("./store").Store;
|
||||
const permuteDomain = require("./permuteDomain").permuteDomain;
|
||||
const pathMatch = require("./pathMatch").pathMatch;
|
||||
const { getCustomInspectSymbol, getUtilInspect } = require("./utilHelper");
|
||||
|
||||
function MemoryCookieStore() {
|
||||
Store.call(this);
|
||||
this.idx = {};
|
||||
}
|
||||
util.inherits(MemoryCookieStore, Store);
|
||||
exports.MemoryCookieStore = MemoryCookieStore;
|
||||
MemoryCookieStore.prototype.idx = null;
|
||||
|
||||
// Since it's just a struct in RAM, this Store is synchronous
|
||||
MemoryCookieStore.prototype.synchronous = true;
|
||||
|
||||
// force a default depth:
|
||||
MemoryCookieStore.prototype.inspect = function() {
|
||||
return "{ idx: "+util.inspect(this.idx, false, 2)+' }';
|
||||
};
|
||||
|
||||
// Use the new custom inspection symbol to add the custom inspect function if
|
||||
// available.
|
||||
if (util.inspect.custom) {
|
||||
MemoryCookieStore.prototype[util.inspect.custom] = MemoryCookieStore.prototype.inspect;
|
||||
}
|
||||
|
||||
MemoryCookieStore.prototype.findCookie = function(domain, path, key, cb) {
|
||||
if (!this.idx[domain]) {
|
||||
return cb(null,undefined);
|
||||
}
|
||||
if (!this.idx[domain][path]) {
|
||||
return cb(null,undefined);
|
||||
}
|
||||
return cb(null,this.idx[domain][path][key]||null);
|
||||
};
|
||||
|
||||
MemoryCookieStore.prototype.findCookies = function(domain, path, cb) {
|
||||
var results = [];
|
||||
if (!domain) {
|
||||
return cb(null,[]);
|
||||
}
|
||||
|
||||
var pathMatcher;
|
||||
if (!path) {
|
||||
// null means "all paths"
|
||||
pathMatcher = function matchAll(domainIndex) {
|
||||
for (var curPath in domainIndex) {
|
||||
var pathIndex = domainIndex[curPath];
|
||||
for (var key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} else {
|
||||
pathMatcher = function matchRFC(domainIndex) {
|
||||
//NOTE: we should use path-match algorithm from S5.1.4 here
|
||||
//(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)
|
||||
Object.keys(domainIndex).forEach(function (cookiePath) {
|
||||
if (pathMatch(path, cookiePath)) {
|
||||
var pathIndex = domainIndex[cookiePath];
|
||||
|
||||
for (var key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
var domains = permuteDomain(domain) || [domain];
|
||||
var idx = this.idx;
|
||||
domains.forEach(function(curDomain) {
|
||||
var domainIndex = idx[curDomain];
|
||||
if (!domainIndex) {
|
||||
return;
|
||||
class MemoryCookieStore extends Store {
|
||||
constructor() {
|
||||
super();
|
||||
this.synchronous = true;
|
||||
this.idx = Object.create(null);
|
||||
const customInspectSymbol = getCustomInspectSymbol();
|
||||
if (customInspectSymbol) {
|
||||
this[customInspectSymbol] = this.inspect;
|
||||
}
|
||||
pathMatcher(domainIndex);
|
||||
});
|
||||
|
||||
cb(null,results);
|
||||
};
|
||||
|
||||
MemoryCookieStore.prototype.putCookie = function(cookie, cb) {
|
||||
if (!this.idx[cookie.domain]) {
|
||||
this.idx[cookie.domain] = {};
|
||||
}
|
||||
if (!this.idx[cookie.domain][cookie.path]) {
|
||||
this.idx[cookie.domain][cookie.path] = {};
|
||||
|
||||
inspect() {
|
||||
const util = { inspect: getUtilInspect(inspectFallback) };
|
||||
return `{ idx: ${util.inspect(this.idx, false, 2)} }`;
|
||||
}
|
||||
this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
|
||||
cb(null);
|
||||
};
|
||||
|
||||
MemoryCookieStore.prototype.updateCookie = function(oldCookie, newCookie, cb) {
|
||||
// updateCookie() may avoid updating cookies that are identical. For example,
|
||||
// lastAccessed may not be important to some stores and an equality
|
||||
// comparison could exclude that field.
|
||||
this.putCookie(newCookie,cb);
|
||||
};
|
||||
|
||||
MemoryCookieStore.prototype.removeCookie = function(domain, path, key, cb) {
|
||||
if (this.idx[domain] && this.idx[domain][path] && this.idx[domain][path][key]) {
|
||||
delete this.idx[domain][path][key];
|
||||
findCookie(domain, path, key, cb) {
|
||||
if (!this.idx[domain]) {
|
||||
return cb(null, undefined);
|
||||
}
|
||||
if (!this.idx[domain][path]) {
|
||||
return cb(null, undefined);
|
||||
}
|
||||
return cb(null, this.idx[domain][path][key] || null);
|
||||
}
|
||||
cb(null);
|
||||
};
|
||||
findCookies(domain, path, allowSpecialUseDomain, cb) {
|
||||
const results = [];
|
||||
if (typeof allowSpecialUseDomain === "function") {
|
||||
cb = allowSpecialUseDomain;
|
||||
allowSpecialUseDomain = true;
|
||||
}
|
||||
if (!domain) {
|
||||
return cb(null, []);
|
||||
}
|
||||
|
||||
MemoryCookieStore.prototype.removeCookies = function(domain, path, cb) {
|
||||
if (this.idx[domain]) {
|
||||
if (path) {
|
||||
delete this.idx[domain][path];
|
||||
let pathMatcher;
|
||||
if (!path) {
|
||||
// null means "all paths"
|
||||
pathMatcher = function matchAll(domainIndex) {
|
||||
for (const curPath in domainIndex) {
|
||||
const pathIndex = domainIndex[curPath];
|
||||
for (const key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
};
|
||||
} else {
|
||||
delete this.idx[domain];
|
||||
pathMatcher = function matchRFC(domainIndex) {
|
||||
//NOTE: we should use path-match algorithm from S5.1.4 here
|
||||
//(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)
|
||||
Object.keys(domainIndex).forEach(cookiePath => {
|
||||
if (pathMatch(path, cookiePath)) {
|
||||
const pathIndex = domainIndex[cookiePath];
|
||||
for (const key in pathIndex) {
|
||||
results.push(pathIndex[key]);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];
|
||||
const idx = this.idx;
|
||||
domains.forEach(curDomain => {
|
||||
const domainIndex = idx[curDomain];
|
||||
if (!domainIndex) {
|
||||
return;
|
||||
}
|
||||
pathMatcher(domainIndex);
|
||||
});
|
||||
|
||||
cb(null, results);
|
||||
}
|
||||
return cb(null);
|
||||
};
|
||||
|
||||
MemoryCookieStore.prototype.removeAllCookies = function(cb) {
|
||||
this.idx = {};
|
||||
return cb(null);
|
||||
}
|
||||
putCookie(cookie, cb) {
|
||||
if (!this.idx[cookie.domain]) {
|
||||
this.idx[cookie.domain] = Object.create(null);
|
||||
}
|
||||
if (!this.idx[cookie.domain][cookie.path]) {
|
||||
this.idx[cookie.domain][cookie.path] = Object.create(null);
|
||||
}
|
||||
this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
|
||||
cb(null);
|
||||
}
|
||||
updateCookie(oldCookie, newCookie, cb) {
|
||||
// updateCookie() may avoid updating cookies that are identical. For example,
|
||||
// lastAccessed may not be important to some stores and an equality
|
||||
// comparison could exclude that field.
|
||||
this.putCookie(newCookie, cb);
|
||||
}
|
||||
removeCookie(domain, path, key, cb) {
|
||||
if (
|
||||
this.idx[domain] &&
|
||||
this.idx[domain][path] &&
|
||||
this.idx[domain][path][key]
|
||||
) {
|
||||
delete this.idx[domain][path][key];
|
||||
}
|
||||
cb(null);
|
||||
}
|
||||
removeCookies(domain, path, cb) {
|
||||
if (this.idx[domain]) {
|
||||
if (path) {
|
||||
delete this.idx[domain][path];
|
||||
} else {
|
||||
delete this.idx[domain];
|
||||
}
|
||||
}
|
||||
return cb(null);
|
||||
}
|
||||
removeAllCookies(cb) {
|
||||
this.idx = Object.create(null);
|
||||
return cb(null);
|
||||
}
|
||||
getAllCookies(cb) {
|
||||
const cookies = [];
|
||||
const idx = this.idx;
|
||||
|
||||
MemoryCookieStore.prototype.getAllCookies = function(cb) {
|
||||
var cookies = [];
|
||||
var idx = this.idx;
|
||||
|
||||
var domains = Object.keys(idx);
|
||||
domains.forEach(function(domain) {
|
||||
var paths = Object.keys(idx[domain]);
|
||||
paths.forEach(function(path) {
|
||||
var keys = Object.keys(idx[domain][path]);
|
||||
keys.forEach(function(key) {
|
||||
if (key !== null) {
|
||||
cookies.push(idx[domain][path][key]);
|
||||
}
|
||||
const domains = Object.keys(idx);
|
||||
domains.forEach(domain => {
|
||||
const paths = Object.keys(idx[domain]);
|
||||
paths.forEach(path => {
|
||||
const keys = Object.keys(idx[domain][path]);
|
||||
keys.forEach(key => {
|
||||
if (key !== null) {
|
||||
cookies.push(idx[domain][path][key]);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Sort by creationIndex so deserializing retains the creation order.
|
||||
// When implementing your own store, this SHOULD retain the order too
|
||||
cookies.sort(function(a,b) {
|
||||
return (a.creationIndex||0) - (b.creationIndex||0);
|
||||
});
|
||||
// Sort by creationIndex so deserializing retains the creation order.
|
||||
// When implementing your own store, this SHOULD retain the order too
|
||||
cookies.sort((a, b) => {
|
||||
return (a.creationIndex || 0) - (b.creationIndex || 0);
|
||||
});
|
||||
|
||||
cb(null, cookies);
|
||||
};
|
||||
cb(null, cookies);
|
||||
}
|
||||
}
|
||||
|
||||
[
|
||||
"findCookie",
|
||||
"findCookies",
|
||||
"putCookie",
|
||||
"updateCookie",
|
||||
"removeCookie",
|
||||
"removeCookies",
|
||||
"removeAllCookies",
|
||||
"getAllCookies"
|
||||
].forEach(name => {
|
||||
MemoryCookieStore.prototype[name] = fromCallback(
|
||||
MemoryCookieStore.prototype[name]
|
||||
);
|
||||
});
|
||||
|
||||
exports.MemoryCookieStore = MemoryCookieStore;
|
||||
|
||||
function inspectFallback(val) {
|
||||
const domains = Object.keys(val);
|
||||
if (domains.length === 0) {
|
||||
return "[Object: null prototype] {}";
|
||||
}
|
||||
let result = "[Object: null prototype] {\n";
|
||||
Object.keys(val).forEach((domain, i) => {
|
||||
result += formatDomain(domain, val[domain]);
|
||||
if (i < domains.length - 1) {
|
||||
result += ",";
|
||||
}
|
||||
result += "\n";
|
||||
});
|
||||
result += "}";
|
||||
return result;
|
||||
}
|
||||
|
||||
function formatDomain(domainName, domainValue) {
|
||||
const indent = " ";
|
||||
let result = `${indent}'${domainName}': [Object: null prototype] {\n`;
|
||||
Object.keys(domainValue).forEach((path, i, paths) => {
|
||||
result += formatPath(path, domainValue[path]);
|
||||
if (i < paths.length - 1) {
|
||||
result += ",";
|
||||
}
|
||||
result += "\n";
|
||||
});
|
||||
result += `${indent}}`;
|
||||
return result;
|
||||
}
|
||||
|
||||
function formatPath(pathName, pathValue) {
|
||||
const indent = " ";
|
||||
let result = `${indent}'${pathName}': [Object: null prototype] {\n`;
|
||||
Object.keys(pathValue).forEach((cookieName, i, cookieNames) => {
|
||||
const cookie = pathValue[cookieName];
|
||||
result += ` ${cookieName}: ${cookie.inspect()}`;
|
||||
if (i < cookieNames.length - 1) {
|
||||
result += ",";
|
||||
}
|
||||
result += "\n";
|
||||
});
|
||||
result += `${indent}}`;
|
||||
return result;
|
||||
}
|
||||
|
||||
exports.inspectFallback = inspectFallback;
|
||||
|
|
|
|||
4
node_modules/tough-cookie/lib/pathMatch.js
generated
vendored
4
node_modules/tough-cookie/lib/pathMatch.js
generated
vendored
|
|
@ -33,13 +33,13 @@
|
|||
* "A request-path path-matches a given cookie-path if at least one of the
|
||||
* following conditions holds:"
|
||||
*/
|
||||
function pathMatch (reqPath, cookiePath) {
|
||||
function pathMatch(reqPath, cookiePath) {
|
||||
// "o The cookie-path and the request-path are identical."
|
||||
if (cookiePath === reqPath) {
|
||||
return true;
|
||||
}
|
||||
|
||||
var idx = reqPath.indexOf(cookiePath);
|
||||
const idx = reqPath.indexOf(cookiePath);
|
||||
if (idx === 0) {
|
||||
// "o The cookie-path is a prefix of the request-path, and the last
|
||||
// character of the cookie-path is %x2F ("/")."
|
||||
|
|
|
|||
25
node_modules/tough-cookie/lib/permuteDomain.js
generated
vendored
25
node_modules/tough-cookie/lib/permuteDomain.js
generated
vendored
|
|
@ -29,12 +29,16 @@
|
|||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
"use strict";
|
||||
var pubsuffix = require('./pubsuffix-psl');
|
||||
const pubsuffix = require("./pubsuffix-psl");
|
||||
|
||||
// Gives the permutation of all possible domainMatch()es of a given domain. The
|
||||
// array is in shortest-to-longest order. Handy for indexing.
|
||||
function permuteDomain (domain) {
|
||||
var pubSuf = pubsuffix.getPublicSuffix(domain);
|
||||
|
||||
function permuteDomain(domain, allowSpecialUseDomain) {
|
||||
const pubSuf = pubsuffix.getPublicSuffix(domain, {
|
||||
allowSpecialUseDomain: allowSpecialUseDomain
|
||||
});
|
||||
|
||||
if (!pubSuf) {
|
||||
return null;
|
||||
}
|
||||
|
|
@ -42,12 +46,17 @@ function permuteDomain (domain) {
|
|||
return [domain];
|
||||
}
|
||||
|
||||
var prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com"
|
||||
var parts = prefix.split('.').reverse();
|
||||
var cur = pubSuf;
|
||||
var permutations = [cur];
|
||||
// Nuke trailing dot
|
||||
if (domain.slice(-1) == ".") {
|
||||
domain = domain.slice(0, -1);
|
||||
}
|
||||
|
||||
const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com"
|
||||
const parts = prefix.split(".").reverse();
|
||||
let cur = pubSuf;
|
||||
const permutations = [cur];
|
||||
while (parts.length) {
|
||||
cur = parts.shift() + '.' + cur;
|
||||
cur = `${parts.shift()}.${cur}`;
|
||||
permutations.push(cur);
|
||||
}
|
||||
return permutations;
|
||||
|
|
|
|||
41
node_modules/tough-cookie/lib/pubsuffix-psl.js
generated
vendored
41
node_modules/tough-cookie/lib/pubsuffix-psl.js
generated
vendored
|
|
@ -28,10 +28,45 @@
|
|||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
'use strict';
|
||||
var psl = require('psl');
|
||||
"use strict";
|
||||
const psl = require("psl");
|
||||
|
||||
// RFC 6761
|
||||
const SPECIAL_USE_DOMAINS = [
|
||||
"local",
|
||||
"example",
|
||||
"invalid",
|
||||
"localhost",
|
||||
"test"
|
||||
];
|
||||
|
||||
const SPECIAL_TREATMENT_DOMAINS = ["localhost", "invalid"];
|
||||
|
||||
function getPublicSuffix(domain, options = {}) {
|
||||
const domainParts = domain.split(".");
|
||||
const topLevelDomain = domainParts[domainParts.length - 1];
|
||||
const allowSpecialUseDomain = !!options.allowSpecialUseDomain;
|
||||
const ignoreError = !!options.ignoreError;
|
||||
|
||||
if (allowSpecialUseDomain && SPECIAL_USE_DOMAINS.includes(topLevelDomain)) {
|
||||
if (domainParts.length > 1) {
|
||||
const secondLevelDomain = domainParts[domainParts.length - 2];
|
||||
// In aforementioned example, the eTLD/pubSuf will be apple.localhost
|
||||
return `${secondLevelDomain}.${topLevelDomain}`;
|
||||
} else if (SPECIAL_TREATMENT_DOMAINS.includes(topLevelDomain)) {
|
||||
// For a single word special use domain, e.g. 'localhost' or 'invalid', per RFC 6761,
|
||||
// "Application software MAY recognize {localhost/invalid} names as special, or
|
||||
// MAY pass them to name resolution APIs as they would for other domain names."
|
||||
return `${topLevelDomain}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (!ignoreError && SPECIAL_USE_DOMAINS.includes(topLevelDomain)) {
|
||||
throw new Error(
|
||||
`Cookie has domain set to the public suffix "${topLevelDomain}" which is a special use domain. To allow this, configure your CookieJar with {allowSpecialUseDomain:true, rejectPublicSuffixes: false}.`
|
||||
);
|
||||
}
|
||||
|
||||
function getPublicSuffix(domain) {
|
||||
return psl.get(domain);
|
||||
}
|
||||
|
||||
|
|
|
|||
83
node_modules/tough-cookie/lib/store.js
generated
vendored
83
node_modules/tough-cookie/lib/store.js
generated
vendored
|
|
@ -28,48 +28,49 @@
|
|||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
'use strict';
|
||||
"use strict";
|
||||
/*jshint unused:false */
|
||||
|
||||
function Store() {
|
||||
class Store {
|
||||
constructor() {
|
||||
this.synchronous = false;
|
||||
}
|
||||
|
||||
findCookie(domain, path, key, cb) {
|
||||
throw new Error("findCookie is not implemented");
|
||||
}
|
||||
|
||||
findCookies(domain, path, allowSpecialUseDomain, cb) {
|
||||
throw new Error("findCookies is not implemented");
|
||||
}
|
||||
|
||||
putCookie(cookie, cb) {
|
||||
throw new Error("putCookie is not implemented");
|
||||
}
|
||||
|
||||
updateCookie(oldCookie, newCookie, cb) {
|
||||
// recommended default implementation:
|
||||
// return this.putCookie(newCookie, cb);
|
||||
throw new Error("updateCookie is not implemented");
|
||||
}
|
||||
|
||||
removeCookie(domain, path, key, cb) {
|
||||
throw new Error("removeCookie is not implemented");
|
||||
}
|
||||
|
||||
removeCookies(domain, path, cb) {
|
||||
throw new Error("removeCookies is not implemented");
|
||||
}
|
||||
|
||||
removeAllCookies(cb) {
|
||||
throw new Error("removeAllCookies is not implemented");
|
||||
}
|
||||
|
||||
getAllCookies(cb) {
|
||||
throw new Error(
|
||||
"getAllCookies is not implemented (therefore jar cannot be serialized)"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
exports.Store = Store;
|
||||
|
||||
// Stores may be synchronous, but are still required to use a
|
||||
// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync"
|
||||
// API that converts from synchronous-callbacks to imperative style.
|
||||
Store.prototype.synchronous = false;
|
||||
|
||||
Store.prototype.findCookie = function(domain, path, key, cb) {
|
||||
throw new Error('findCookie is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.findCookies = function(domain, path, cb) {
|
||||
throw new Error('findCookies is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.putCookie = function(cookie, cb) {
|
||||
throw new Error('putCookie is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.updateCookie = function(oldCookie, newCookie, cb) {
|
||||
// recommended default implementation:
|
||||
// return this.putCookie(newCookie, cb);
|
||||
throw new Error('updateCookie is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.removeCookie = function(domain, path, key, cb) {
|
||||
throw new Error('removeCookie is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.removeCookies = function(domain, path, cb) {
|
||||
throw new Error('removeCookies is not implemented');
|
||||
};
|
||||
|
||||
Store.prototype.removeAllCookies = function(cb) {
|
||||
throw new Error('removeAllCookies is not implemented');
|
||||
}
|
||||
|
||||
Store.prototype.getAllCookies = function(cb) {
|
||||
throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)');
|
||||
};
|
||||
|
|
|
|||
39
node_modules/tough-cookie/lib/utilHelper.js
generated
vendored
Normal file
39
node_modules/tough-cookie/lib/utilHelper.js
generated
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
function requireUtil() {
|
||||
try {
|
||||
// eslint-disable-next-line no-restricted-modules
|
||||
return require("util");
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// for v10.12.0+
|
||||
function lookupCustomInspectSymbol() {
|
||||
return Symbol.for("nodejs.util.inspect.custom");
|
||||
}
|
||||
|
||||
// for older node environments
|
||||
function tryReadingCustomSymbolFromUtilInspect(options) {
|
||||
const _requireUtil = options.requireUtil || requireUtil;
|
||||
const util = _requireUtil();
|
||||
return util ? util.inspect.custom : null;
|
||||
}
|
||||
|
||||
exports.getUtilInspect = function getUtilInspect(fallback, options = {}) {
|
||||
const _requireUtil = options.requireUtil || requireUtil;
|
||||
const util = _requireUtil();
|
||||
return function inspect(value, showHidden, depth) {
|
||||
return util ? util.inspect(value, showHidden, depth) : fallback(value);
|
||||
};
|
||||
};
|
||||
|
||||
exports.getCustomInspectSymbol = function getCustomInspectSymbol(options = {}) {
|
||||
const _lookupCustomInspectSymbol =
|
||||
options.lookupCustomInspectSymbol || lookupCustomInspectSymbol;
|
||||
|
||||
// get custom inspect symbol for node environments
|
||||
return (
|
||||
_lookupCustomInspectSymbol() ||
|
||||
tryReadingCustomSymbolFromUtilInspect(options)
|
||||
);
|
||||
};
|
||||
95
node_modules/tough-cookie/lib/validators.js
generated
vendored
Normal file
95
node_modules/tough-cookie/lib/validators.js
generated
vendored
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
/* ************************************************************************************
|
||||
Extracted from check-types.js
|
||||
https://gitlab.com/philbooth/check-types.js
|
||||
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Phil Booth
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
************************************************************************************ */
|
||||
"use strict";
|
||||
|
||||
/* Validation functions copied from check-types package - https://www.npmjs.com/package/check-types */
|
||||
function isFunction(data) {
|
||||
return typeof data === "function";
|
||||
}
|
||||
|
||||
function isNonEmptyString(data) {
|
||||
return isString(data) && data !== "";
|
||||
}
|
||||
|
||||
function isDate(data) {
|
||||
return isInstanceStrict(data, Date) && isInteger(data.getTime());
|
||||
}
|
||||
|
||||
function isEmptyString(data) {
|
||||
return data === "" || (data instanceof String && data.toString() === "");
|
||||
}
|
||||
|
||||
function isString(data) {
|
||||
return typeof data === "string" || data instanceof String;
|
||||
}
|
||||
|
||||
function isObject(data) {
|
||||
return toString.call(data) === "[object Object]";
|
||||
}
|
||||
function isInstanceStrict(data, prototype) {
|
||||
try {
|
||||
return data instanceof prototype;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function isInteger(data) {
|
||||
return typeof data === "number" && data % 1 === 0;
|
||||
}
|
||||
/* End validation functions */
|
||||
|
||||
function validate(bool, cb, options) {
|
||||
if (!isFunction(cb)) {
|
||||
options = cb;
|
||||
cb = null;
|
||||
}
|
||||
if (!isObject(options)) options = { Error: "Failed Check" };
|
||||
if (!bool) {
|
||||
if (cb) {
|
||||
cb(new ParameterError(options));
|
||||
} else {
|
||||
throw new ParameterError(options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ParameterError extends Error {
|
||||
constructor(...params) {
|
||||
super(...params);
|
||||
}
|
||||
}
|
||||
|
||||
exports.ParameterError = ParameterError;
|
||||
exports.isFunction = isFunction;
|
||||
exports.isNonEmptyString = isNonEmptyString;
|
||||
exports.isDate = isDate;
|
||||
exports.isEmptyString = isEmptyString;
|
||||
exports.isString = isString;
|
||||
exports.isObject = isObject;
|
||||
exports.validate = validate;
|
||||
2
node_modules/tough-cookie/lib/version.js
generated
vendored
2
node_modules/tough-cookie/lib/version.js
generated
vendored
|
|
@ -1,2 +1,2 @@
|
|||
// generated by genversion
|
||||
module.exports = '3.0.1'
|
||||
module.exports = '4.1.3'
|
||||
|
|
|
|||
65
node_modules/tough-cookie/package.json
generated
vendored
65
node_modules/tough-cookie/package.json
generated
vendored
|
|
@ -6,16 +6,44 @@
|
|||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Alexander Savin",
|
||||
"website": "https://github.com/apsavin"
|
||||
"name": "Ivan Nikulin",
|
||||
"website": "https://github.com/inikulin"
|
||||
},
|
||||
{
|
||||
"name": "Shivan Kaul Sahib",
|
||||
"website": "https://github.com/ShivanKaul"
|
||||
},
|
||||
{
|
||||
"name": "Clint Ruoho",
|
||||
"website": "https://github.com/ruoho"
|
||||
},
|
||||
{
|
||||
"name": "Ian Livingstone",
|
||||
"website": "https://github.com/ianlivingstone"
|
||||
},
|
||||
{
|
||||
"name": "Ivan Nikulin",
|
||||
"website": "https://github.com/inikulin"
|
||||
"name": "Andrew Waterman",
|
||||
"website": "https://github.com/awaterma"
|
||||
},
|
||||
{
|
||||
"name": "Michael de Libero ",
|
||||
"website": "https://github.com/medelibero-sfdc"
|
||||
},
|
||||
{
|
||||
"name": "Jonathan Stewmon",
|
||||
"website": "https://github.com/jstewmon"
|
||||
},
|
||||
{
|
||||
"name": "Miguel Roncancio",
|
||||
"website": "https://github.com/miggs125"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Mayr",
|
||||
"website": "https://github.com/Sebmaster"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Savin",
|
||||
"website": "https://github.com/apsavin"
|
||||
},
|
||||
{
|
||||
"name": "Lalit Kapoor",
|
||||
|
|
@ -24,10 +52,6 @@
|
|||
{
|
||||
"name": "Sam Thompson",
|
||||
"website": "https://github.com/sambthompson"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Mayr",
|
||||
"website": "https://github.com/Sebmaster"
|
||||
}
|
||||
],
|
||||
"license": "BSD-3-Clause",
|
||||
|
|
@ -43,7 +67,7 @@
|
|||
"RFC6265",
|
||||
"RFC2965"
|
||||
],
|
||||
"version": "3.0.1",
|
||||
"version": "4.1.3",
|
||||
"homepage": "https://github.com/salesforce/tough-cookie",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
|
@ -58,22 +82,29 @@
|
|||
],
|
||||
"scripts": {
|
||||
"version": "genversion lib/version.js && git add lib/version.js",
|
||||
"test": "vows test/*_test.js",
|
||||
"cover": "nyc --reporter=lcov --reporter=html vows test/*_test.js"
|
||||
"test": "vows test/*_test.js && npm run eslint",
|
||||
"cover": "nyc --reporter=lcov --reporter=html vows test/*_test.js",
|
||||
"eslint": "eslint --env node --ext .js .",
|
||||
"prettier": "prettier '**/*.{json,ts,yaml,md}'",
|
||||
"format": "npm run eslint -- --fix"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"async": "^1.4.2",
|
||||
"async": "^2.6.2",
|
||||
"eslint": "^5.16.0",
|
||||
"eslint-config-prettier": "^4.2.0",
|
||||
"eslint-plugin-prettier": "^3.0.1",
|
||||
"genversion": "^2.1.0",
|
||||
"nyc": "^11.6.0",
|
||||
"string.prototype.repeat": "^0.2.0",
|
||||
"nyc": "^14.0.0",
|
||||
"prettier": "^1.17.0",
|
||||
"vows": "^0.8.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"ip-regex": "^2.1.0",
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
2
node_modules/universalify/README.md
generated
vendored
2
node_modules/universalify/README.md
generated
vendored
|
|
@ -21,7 +21,7 @@ npm install universalify
|
|||
|
||||
Takes a callback-based function to universalify, and returns the universalified function.
|
||||
|
||||
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with more than three arguments, and does not ensure that the callback is only called once.
|
||||
Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once.
|
||||
|
||||
```js
|
||||
function callbackFn (n, cb) {
|
||||
|
|
|
|||
6
node_modules/universalify/index.js
generated
vendored
6
node_modules/universalify/index.js
generated
vendored
|
|
@ -20,6 +20,10 @@ exports.fromPromise = function (fn) {
|
|||
return Object.defineProperty(function () {
|
||||
const cb = arguments[arguments.length - 1]
|
||||
if (typeof cb !== 'function') return fn.apply(this, arguments)
|
||||
else fn.apply(this, arguments).then(r => cb(null, r), cb)
|
||||
else {
|
||||
delete arguments[arguments.length - 1]
|
||||
arguments.length--
|
||||
fn.apply(this, arguments).then(r => cb(null, r), cb)
|
||||
}
|
||||
}, 'name', { value: fn.name })
|
||||
}
|
||||
|
|
|
|||
2
node_modules/universalify/package.json
generated
vendored
2
node_modules/universalify/package.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "universalify",
|
||||
"version": "0.1.2",
|
||||
"version": "0.2.0",
|
||||
"description": "Make a callback- or promise-based function support both promises and callbacks.",
|
||||
"keywords": [
|
||||
"callback",
|
||||
|
|
|
|||
22
node_modules/url-parse/LICENSE
generated
vendored
Normal file
22
node_modules/url-parse/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
153
node_modules/url-parse/README.md
generated
vendored
Normal file
153
node_modules/url-parse/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
# url-parse
|
||||
|
||||
[](https://www.npmjs.com/package/url-parse)[](https://github.com/unshiftio/url-parse/actions?query=workflow%3ACI+branch%3Amaster)[](https://coveralls.io/r/unshiftio/url-parse?branch=master)
|
||||
|
||||
[](https://saucelabs.com/u/url-parse)
|
||||
|
||||
**`url-parse` was created in 2014 when the WHATWG URL API was not available in
|
||||
Node.js and the `URL` interface was supported only in some browsers. Today this
|
||||
is no longer true. The `URL` interface is available in all supported Node.js
|
||||
release lines and basically all browsers. Consider using it for better security
|
||||
and accuracy.**
|
||||
|
||||
The `url-parse` method exposes two different API interfaces. The
|
||||
[`url`](https://nodejs.org/api/url.html) interface that you know from Node.js
|
||||
and the new [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL/URL)
|
||||
interface that is available in the latest browsers.
|
||||
|
||||
In version `0.1` we moved from a DOM based parsing solution, using the `<a>`
|
||||
element, to a full Regular Expression solution. The main reason for this was
|
||||
to make the URL parser available in different JavaScript environments as you
|
||||
don't always have access to the DOM. An example of such environment is the
|
||||
[`Worker`](https://developer.mozilla.org/en/docs/Web/API/Worker) interface.
|
||||
The RegExp based solution didn't work well as it required a lot of lookups
|
||||
causing major problems in FireFox. In version `1.0.0` we ditched the RegExp
|
||||
based solution in favor of a pure string parsing solution which chops up the
|
||||
URL into smaller pieces. This module still has a really small footprint as it
|
||||
has been designed to be used on the client side.
|
||||
|
||||
In addition to URL parsing we also expose the bundled `querystringify` module.
|
||||
|
||||
## Installation
|
||||
|
||||
This module is designed to be used using either browserify or Node.js it's
|
||||
released in the public npm registry and can be installed using:
|
||||
|
||||
```
|
||||
npm install url-parse
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
All examples assume that this library is bootstrapped using:
|
||||
|
||||
```js
|
||||
'use strict';
|
||||
|
||||
var Url = require('url-parse');
|
||||
```
|
||||
|
||||
To parse an URL simply call the `URL` method with the URL that needs to be
|
||||
transformed into an object.
|
||||
|
||||
```js
|
||||
var url = new Url('https://github.com/foo/bar');
|
||||
```
|
||||
|
||||
The `new` keyword is optional but it will save you an extra function invocation.
|
||||
The constructor takes the following arguments:
|
||||
|
||||
- `url` (`String`): A string representing an absolute or relative URL.
|
||||
- `baseURL` (`Object` | `String`): An object or string representing
|
||||
the base URL to use in case `url` is a relative URL. This argument is
|
||||
optional and defaults to [`location`](https://developer.mozilla.org/en-US/docs/Web/API/Location)
|
||||
in the browser.
|
||||
- `parser` (`Boolean` | `Function`): This argument is optional and specifies
|
||||
how to parse the query string. By default it is `false` so the query string
|
||||
is not parsed. If you pass `true` the query string is parsed using the
|
||||
embedded `querystringify` module. If you pass a function the query string
|
||||
will be parsed using this function.
|
||||
|
||||
As said above we also support the Node.js interface so you can also use the
|
||||
library in this way:
|
||||
|
||||
```js
|
||||
'use strict';
|
||||
|
||||
var parse = require('url-parse')
|
||||
, url = parse('https://github.com/foo/bar', true);
|
||||
```
|
||||
|
||||
The returned `url` instance contains the following properties:
|
||||
|
||||
- `protocol`: The protocol scheme of the URL (e.g. `http:`).
|
||||
- `slashes`: A boolean which indicates whether the `protocol` is followed by two
|
||||
forward slashes (`//`).
|
||||
- `auth`: Authentication information portion (e.g. `username:password`).
|
||||
- `username`: Username of basic authentication.
|
||||
- `password`: Password of basic authentication.
|
||||
- `host`: Host name with port number. The hostname might be invalid.
|
||||
- `hostname`: Host name without port number. This might be an invalid hostname.
|
||||
- `port`: Optional port number.
|
||||
- `pathname`: URL path.
|
||||
- `query`: Parsed object containing query string, unless parsing is set to false.
|
||||
- `hash`: The "fragment" portion of the URL including the pound-sign (`#`).
|
||||
- `href`: The full URL.
|
||||
- `origin`: The origin of the URL.
|
||||
|
||||
Note that when `url-parse` is used in a browser environment, it will default to
|
||||
using the browser's current window location as the base URL when parsing all
|
||||
inputs. To parse an input independently of the browser's current URL (e.g. for
|
||||
functionality parity with the library in a Node environment), pass an empty
|
||||
location object as the second parameter:
|
||||
|
||||
```js
|
||||
var parse = require('url-parse');
|
||||
parse('hostname', {});
|
||||
```
|
||||
|
||||
### Url.set(key, value)
|
||||
|
||||
A simple helper function to change parts of the URL and propagating it through
|
||||
all properties. When you set a new `host` you want the same value to be applied
|
||||
to `port` if has a different port number, `hostname` so it has a correct name
|
||||
again and `href` so you have a complete URL.
|
||||
|
||||
```js
|
||||
var parsed = parse('http://google.com/parse-things');
|
||||
|
||||
parsed.set('hostname', 'yahoo.com');
|
||||
console.log(parsed.href); // http://yahoo.com/parse-things
|
||||
```
|
||||
|
||||
It's aware of default ports so you cannot set a port 80 on an URL which has
|
||||
`http` as protocol.
|
||||
|
||||
### Url.toString()
|
||||
|
||||
The returned `url` object comes with a custom `toString` method which will
|
||||
generate a full URL again when called. The method accepts an extra function
|
||||
which will stringify the query string for you. If you don't supply a function we
|
||||
will use our default method.
|
||||
|
||||
```js
|
||||
var location = url.toString(); // http://example.com/whatever/?qs=32
|
||||
```
|
||||
|
||||
You would rarely need to use this method as the full URL is also available as
|
||||
`href` property. If you are using the `URL.set` method to make changes, this
|
||||
will automatically update.
|
||||
|
||||
## Testing
|
||||
|
||||
The testing of this module is done in 3 different ways:
|
||||
|
||||
1. We have unit tests that run under Node.js. You can run these tests with the
|
||||
`npm test` command.
|
||||
2. Code coverage can be run manually using `npm run coverage`.
|
||||
3. For browser testing we use Sauce Labs and `zuul`. You can run browser tests
|
||||
using the `npm run test-browser` command.
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
755
node_modules/url-parse/dist/url-parse.js
generated
vendored
Normal file
755
node_modules/url-parse/dist/url-parse.js
generated
vendored
Normal file
|
|
@ -0,0 +1,755 @@
|
|||
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.URLParse = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
|
||||
(function (global){(function (){
|
||||
'use strict';
|
||||
|
||||
var required = require('requires-port')
|
||||
, qs = require('querystringify')
|
||||
, controlOrWhitespace = /^[\x00-\x20\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]+/
|
||||
, CRHTLF = /[\n\r\t]/g
|
||||
, slashes = /^[A-Za-z][A-Za-z0-9+-.]*:\/\//
|
||||
, port = /:\d+$/
|
||||
, protocolre = /^([a-z][a-z0-9.+-]*:)?(\/\/)?([\\/]+)?([\S\s]*)/i
|
||||
, windowsDriveLetter = /^[a-zA-Z]:/;
|
||||
|
||||
/**
|
||||
* Remove control characters and whitespace from the beginning of a string.
|
||||
*
|
||||
* @param {Object|String} str String to trim.
|
||||
* @returns {String} A new string representing `str` stripped of control
|
||||
* characters and whitespace from its beginning.
|
||||
* @public
|
||||
*/
|
||||
function trimLeft(str) {
|
||||
return (str ? str : '').toString().replace(controlOrWhitespace, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* These are the parse rules for the URL parser, it informs the parser
|
||||
* about:
|
||||
*
|
||||
* 0. The char it Needs to parse, if it's a string it should be done using
|
||||
* indexOf, RegExp using exec and NaN means set as current value.
|
||||
* 1. The property we should set when parsing this value.
|
||||
* 2. Indication if it's backwards or forward parsing, when set as number it's
|
||||
* the value of extra chars that should be split off.
|
||||
* 3. Inherit from location if non existing in the parser.
|
||||
* 4. `toLowerCase` the resulting value.
|
||||
*/
|
||||
var rules = [
|
||||
['#', 'hash'], // Extract from the back.
|
||||
['?', 'query'], // Extract from the back.
|
||||
function sanitize(address, url) { // Sanitize what is left of the address
|
||||
return isSpecial(url.protocol) ? address.replace(/\\/g, '/') : address;
|
||||
},
|
||||
['/', 'pathname'], // Extract from the back.
|
||||
['@', 'auth', 1], // Extract from the front.
|
||||
[NaN, 'host', undefined, 1, 1], // Set left over value.
|
||||
[/:(\d*)$/, 'port', undefined, 1], // RegExp the back.
|
||||
[NaN, 'hostname', undefined, 1, 1] // Set left over.
|
||||
];
|
||||
|
||||
/**
|
||||
* These properties should not be copied or inherited from. This is only needed
|
||||
* for all non blob URL's as a blob URL does not include a hash, only the
|
||||
* origin.
|
||||
*
|
||||
* @type {Object}
|
||||
* @private
|
||||
*/
|
||||
var ignore = { hash: 1, query: 1 };
|
||||
|
||||
/**
|
||||
* The location object differs when your code is loaded through a normal page,
|
||||
* Worker or through a worker using a blob. And with the blobble begins the
|
||||
* trouble as the location object will contain the URL of the blob, not the
|
||||
* location of the page where our code is loaded in. The actual origin is
|
||||
* encoded in the `pathname` so we can thankfully generate a good "default"
|
||||
* location from it so we can generate proper relative URL's again.
|
||||
*
|
||||
* @param {Object|String} loc Optional default location object.
|
||||
* @returns {Object} lolcation object.
|
||||
* @public
|
||||
*/
|
||||
function lolcation(loc) {
|
||||
var globalVar;
|
||||
|
||||
if (typeof window !== 'undefined') globalVar = window;
|
||||
else if (typeof global !== 'undefined') globalVar = global;
|
||||
else if (typeof self !== 'undefined') globalVar = self;
|
||||
else globalVar = {};
|
||||
|
||||
var location = globalVar.location || {};
|
||||
loc = loc || location;
|
||||
|
||||
var finaldestination = {}
|
||||
, type = typeof loc
|
||||
, key;
|
||||
|
||||
if ('blob:' === loc.protocol) {
|
||||
finaldestination = new Url(unescape(loc.pathname), {});
|
||||
} else if ('string' === type) {
|
||||
finaldestination = new Url(loc, {});
|
||||
for (key in ignore) delete finaldestination[key];
|
||||
} else if ('object' === type) {
|
||||
for (key in loc) {
|
||||
if (key in ignore) continue;
|
||||
finaldestination[key] = loc[key];
|
||||
}
|
||||
|
||||
if (finaldestination.slashes === undefined) {
|
||||
finaldestination.slashes = slashes.test(loc.href);
|
||||
}
|
||||
}
|
||||
|
||||
return finaldestination;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether a protocol scheme is special.
|
||||
*
|
||||
* @param {String} The protocol scheme of the URL
|
||||
* @return {Boolean} `true` if the protocol scheme is special, else `false`
|
||||
* @private
|
||||
*/
|
||||
function isSpecial(scheme) {
|
||||
return (
|
||||
scheme === 'file:' ||
|
||||
scheme === 'ftp:' ||
|
||||
scheme === 'http:' ||
|
||||
scheme === 'https:' ||
|
||||
scheme === 'ws:' ||
|
||||
scheme === 'wss:'
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef ProtocolExtract
|
||||
* @type Object
|
||||
* @property {String} protocol Protocol matched in the URL, in lowercase.
|
||||
* @property {Boolean} slashes `true` if protocol is followed by "//", else `false`.
|
||||
* @property {String} rest Rest of the URL that is not part of the protocol.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Extract protocol information from a URL with/without double slash ("//").
|
||||
*
|
||||
* @param {String} address URL we want to extract from.
|
||||
* @param {Object} location
|
||||
* @return {ProtocolExtract} Extracted information.
|
||||
* @private
|
||||
*/
|
||||
function extractProtocol(address, location) {
|
||||
address = trimLeft(address);
|
||||
address = address.replace(CRHTLF, '');
|
||||
location = location || {};
|
||||
|
||||
var match = protocolre.exec(address);
|
||||
var protocol = match[1] ? match[1].toLowerCase() : '';
|
||||
var forwardSlashes = !!match[2];
|
||||
var otherSlashes = !!match[3];
|
||||
var slashesCount = 0;
|
||||
var rest;
|
||||
|
||||
if (forwardSlashes) {
|
||||
if (otherSlashes) {
|
||||
rest = match[2] + match[3] + match[4];
|
||||
slashesCount = match[2].length + match[3].length;
|
||||
} else {
|
||||
rest = match[2] + match[4];
|
||||
slashesCount = match[2].length;
|
||||
}
|
||||
} else {
|
||||
if (otherSlashes) {
|
||||
rest = match[3] + match[4];
|
||||
slashesCount = match[3].length;
|
||||
} else {
|
||||
rest = match[4]
|
||||
}
|
||||
}
|
||||
|
||||
if (protocol === 'file:') {
|
||||
if (slashesCount >= 2) {
|
||||
rest = rest.slice(2);
|
||||
}
|
||||
} else if (isSpecial(protocol)) {
|
||||
rest = match[4];
|
||||
} else if (protocol) {
|
||||
if (forwardSlashes) {
|
||||
rest = rest.slice(2);
|
||||
}
|
||||
} else if (slashesCount >= 2 && isSpecial(location.protocol)) {
|
||||
rest = match[4];
|
||||
}
|
||||
|
||||
return {
|
||||
protocol: protocol,
|
||||
slashes: forwardSlashes || isSpecial(protocol),
|
||||
slashesCount: slashesCount,
|
||||
rest: rest
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a relative URL pathname against a base URL pathname.
|
||||
*
|
||||
* @param {String} relative Pathname of the relative URL.
|
||||
* @param {String} base Pathname of the base URL.
|
||||
* @return {String} Resolved pathname.
|
||||
* @private
|
||||
*/
|
||||
function resolve(relative, base) {
|
||||
if (relative === '') return base;
|
||||
|
||||
var path = (base || '/').split('/').slice(0, -1).concat(relative.split('/'))
|
||||
, i = path.length
|
||||
, last = path[i - 1]
|
||||
, unshift = false
|
||||
, up = 0;
|
||||
|
||||
while (i--) {
|
||||
if (path[i] === '.') {
|
||||
path.splice(i, 1);
|
||||
} else if (path[i] === '..') {
|
||||
path.splice(i, 1);
|
||||
up++;
|
||||
} else if (up) {
|
||||
if (i === 0) unshift = true;
|
||||
path.splice(i, 1);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
|
||||
if (unshift) path.unshift('');
|
||||
if (last === '.' || last === '..') path.push('');
|
||||
|
||||
return path.join('/');
|
||||
}
|
||||
|
||||
/**
 * The actual URL instance. Instead of returning an object we've opted-in to
 * create an actual constructor as it's much more memory efficient and
 * faster and it pleases my OCD.
 *
 * It is worth noting that we should not use `URL` as class name to prevent
 * clashes with the global URL instance that got introduced in browsers.
 *
 * @constructor
 * @param {String} address URL we want to parse.
 * @param {Object|String} [location] Location defaults for relative paths.
 * @param {Boolean|Function} [parser] Parser for the query string.
 * @private
 */
function Url(address, location, parser) {
  //
  // Normalize the input before parsing: strip leading control characters
  // and whitespace, and remove CR / LF / TAB anywhere in the address.
  //
  address = trimLeft(address);
  address = address.replace(CRHTLF, '');

  // Allow calling without `new`.
  if (!(this instanceof Url)) {
    return new Url(address, location, parser);
  }

  var relative, extracted, parse, instruction, index, key
    , instructions = rules.slice()
    , type = typeof location
    , url = this
    , i = 0;

  //
  // The following if statements allows this module to have compatibility with
  // 2 different APIs:
  //
  // 1. Node.js's `url.parse` api which accepts a URL, boolean as arguments
  // where the boolean indicates that the query string should also be parsed.
  //
  // 2. The `URL` interface of the browser which accepts a URL, object as
  // arguments. The supplied object will be used as default values / fall-back
  // for relative paths.
  //
  if ('object' !== type && 'string' !== type) {
    parser = location;
    location = null;
  }

  // A truthy non-function `parser` (e.g. `true`) selects the built-in parser.
  if (parser && 'function' !== typeof parser) parser = qs.parse;

  location = lolcation(location);

  //
  // Extract protocol information before running the instructions.
  //
  extracted = extractProtocol(address || '', location);
  relative = !extracted.protocol && !extracted.slashes;
  url.slashes = extracted.slashes || relative && location.slashes;
  url.protocol = extracted.protocol || location.protocol || '';
  address = extracted.rest;

  //
  // When the authority component is absent the URL starts with a path
  // component: swap the pathname rule so everything left is the path.
  //
  if (
    extracted.protocol === 'file:' && (
      extracted.slashesCount !== 2 || windowsDriveLetter.test(address)) ||
    (!extracted.slashes &&
      (extracted.protocol ||
        extracted.slashesCount < 2 ||
        !isSpecial(url.protocol)))
  ) {
    instructions[3] = [/(.*)/, 'pathname'];
  }

  for (; i < instructions.length; i++) {
    instruction = instructions[i];

    // Function rules transform the remaining address (see `rules`).
    if (typeof instruction === 'function') {
      address = instruction(address, url);
      continue;
    }

    parse = instruction[0];
    key = instruction[1];

    // `parse !== parse` is a NaN check: NaN rules consume what is left.
    if (parse !== parse) {
      url[key] = address;
    } else if ('string' === typeof parse) {
      // `@` is matched from the back so the auth part itself may contain `@`.
      index = parse === '@'
        ? address.lastIndexOf(parse)
        : address.indexOf(parse);

      if (~index) {
        if ('number' === typeof instruction[2]) {
          // Forward parsing: keep the front, drop the delimiter.
          url[key] = address.slice(0, index);
          address = address.slice(index + instruction[2]);
        } else {
          // Backward parsing: keep the back, including the delimiter.
          url[key] = address.slice(index);
          address = address.slice(0, index);
        }
      }
    } else if ((index = parse.exec(address))) {
      url[key] = index[1];
      address = address.slice(0, index.index);
    }

    // Inherit missing parts from `location` when parsing a relative URL.
    url[key] = url[key] || (
      relative && instruction[3] ? location[key] || '' : ''
    );

    //
    // Hostname, host and protocol should be lowercased so they can be used to
    // create a proper `origin`.
    //
    if (instruction[4]) url[key] = url[key].toLowerCase();
  }

  //
  // Also parse the supplied query string in to an object. If we're supplied
  // with a custom parser as function use that instead of the default built-in
  // parser.
  //
  if (parser) url.query = parser(url.query);

  //
  // If the URL is relative, resolve the pathname against the base URL.
  //
  if (
    relative
    && location.slashes
    && url.pathname.charAt(0) !== '/'
    && (url.pathname !== '' || location.pathname !== '')
  ) {
    url.pathname = resolve(url.pathname, location.pathname);
  }

  //
  // Default to a / for pathname if none exists. This normalizes the URL
  // to always have a /
  //
  if (url.pathname.charAt(0) !== '/' && isSpecial(url.protocol)) {
    url.pathname = '/' + url.pathname;
  }

  //
  // We should not add port numbers if they are already the default port number
  // for a given protocol. As the host also contains the port number we're going
  // to override it with the hostname which contains no port number.
  //
  if (!required(url.port, url.protocol)) {
    url.host = url.hostname;
    url.port = '';
  }

  //
  // Parse down the `auth` for the username and password. Both halves are
  // round-tripped through decode/encode so they end up consistently
  // percent-encoded.
  //
  url.username = url.password = '';

  if (url.auth) {
    index = url.auth.indexOf(':');

    if (~index) {
      url.username = url.auth.slice(0, index);
      url.username = encodeURIComponent(decodeURIComponent(url.username));

      url.password = url.auth.slice(index + 1);
      url.password = encodeURIComponent(decodeURIComponent(url.password))
    } else {
      url.username = encodeURIComponent(decodeURIComponent(url.auth));
    }

    url.auth = url.password ? url.username +':'+ url.password : url.username;
  }

  // `file:` URLs and non-special schemes get an opaque origin ('null').
  url.origin = url.protocol !== 'file:' && isSpecial(url.protocol) && url.host
    ? url.protocol +'//'+ url.host
    : 'null';

  //
  // The href is just the compiled result.
  //
  url.href = url.toString();
}
|
||||
|
||||
/**
 * This is a convenience method for changing properties in the URL instance to
 * ensure that they all propagate correctly.
 *
 * @param {String} part Property we need to adjust.
 * @param {Mixed} value The newly assigned value.
 * @param {Boolean|Function} fn When setting the query, it will be the function
 *                              used to parse the query.
 *                              When setting the protocol, double slash will be
 *                              removed from the final url if it is true.
 * @returns {URL} URL instance for chaining.
 * @public
 */
function set(part, value, fn) {
  var url = this;

  switch (part) {
    case 'query':
      // String values are parsed with `fn` (or the built-in parser).
      if ('string' === typeof value && value.length) {
        value = (fn || qs.parse)(value);
      }

      url[part] = value;
      break;

    case 'port':
      url[part] = value;

      // Default ports are dropped; otherwise `host` is kept in sync.
      if (!required(value, url.protocol)) {
        url.host = url.hostname;
        url[part] = '';
      } else if (value) {
        url.host = url.hostname +':'+ value;
      }

      break;

    case 'hostname':
      url[part] = value;

      // Keep `host` (hostname[:port]) in sync with the new hostname.
      if (url.port) value += ':'+ url.port;
      url.host = value;
      break;

    case 'host':
      url[part] = value;

      // Split a trailing `:port` back into `hostname` and `port`.
      if (port.test(value)) {
        value = value.split(':');
        url.port = value.pop();
        url.hostname = value.join(':');
      } else {
        url.hostname = value;
        url.port = '';
      }

      break;

    case 'protocol':
      url.protocol = value.toLowerCase();
      url.slashes = !fn;
      break;

    case 'pathname':
    case 'hash':
      // Ensure the canonical leading '/' (pathname) or '#' (hash).
      if (value) {
        var char = part === 'pathname' ? '/' : '#';
        url[part] = value.charAt(0) !== char ? char + value : value;
      } else {
        url[part] = value;
      }
      break;

    case 'username':
    case 'password':
      url[part] = encodeURIComponent(value);
      break;

    case 'auth':
      // Split `user:pass` and percent-encode both halves consistently.
      var index = value.indexOf(':');

      if (~index) {
        url.username = value.slice(0, index);
        url.username = encodeURIComponent(decodeURIComponent(url.username));

        url.password = value.slice(index + 1);
        url.password = encodeURIComponent(decodeURIComponent(url.password));
      } else {
        url.username = encodeURIComponent(decodeURIComponent(value));
      }
  }

  // Re-apply the lowercase rules (hostname, host, protocol) after any change.
  for (var i = 0; i < rules.length; i++) {
    var ins = rules[i];

    if (ins[4]) url[ins[1]] = url[ins[1]].toLowerCase();
  }

  // Recompute the derived properties so the instance stays consistent.
  url.auth = url.password ? url.username +':'+ url.password : url.username;

  url.origin = url.protocol !== 'file:' && isSpecial(url.protocol) && url.host
    ? url.protocol +'//'+ url.host
    : 'null';

  url.href = url.toString();

  return url;
}
|
||||
|
||||
/**
 * Transform the properties back in to a valid and full URL string.
 *
 * @param {Function} stringify Optional query stringify function.
 * @returns {String} Compiled version of the URL.
 * @public
 */
function toString(stringify) {
  if (!stringify || 'function' !== typeof stringify) stringify = qs.stringify;

  var query
    , url = this
    , host = url.host
    , protocol = url.protocol;

  // Make sure the protocol, when present, ends with a single ':'.
  if (protocol && protocol.charAt(protocol.length - 1) !== ':') protocol += ':';

  var result =
    protocol +
    ((url.protocol && url.slashes) || isSpecial(url.protocol) ? '//' : '');

  if (url.username) {
    result += url.username;
    if (url.password) result += ':'+ url.password;
    result += '@';
  } else if (url.password) {
    result += ':'+ url.password;
    result += '@';
  } else if (
    url.protocol !== 'file:' &&
    isSpecial(url.protocol) &&
    !host &&
    url.pathname !== '/'
  ) {
    //
    // Add back the empty userinfo, otherwise the original invalid URL
    // might be transformed into a valid one with `url.pathname` as host.
    //
    result += '@';
  }

  //
  // Trailing colon is removed from `url.host` when it is parsed. If it still
  // ends with a colon, then add back the trailing colon that was removed. This
  // prevents an invalid URL from being transformed into a valid one.
  //
  if (host[host.length - 1] === ':' || (port.test(url.hostname) && !url.port)) {
    host += ':';
  }

  result += host + url.pathname;

  // Stringify object queries; string queries are used as-is, prefixing a '?'
  // only when it is missing.
  query = 'object' === typeof url.query ? stringify(url.query) : url.query;
  if (query) result += '?' !== query.charAt(0) ? '?'+ query : query;

  if (url.hash) result += url.hash;

  return result;
}
|
||||
|
||||
//
// All instances share a single prototype exposing the public API.
//
Url.prototype = { set: set, toString: toString };

//
// Expose the URL parser and some additional properties that might be useful for
// others or testing.
//
Url.extractProtocol = extractProtocol;
Url.location = lolcation;
Url.trimLeft = trimLeft;
Url.qs = qs;

module.exports = Url;
|
||||
|
||||
}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
|
||||
},{"querystringify":2,"requires-port":3}],2:[function(require,module,exports){
|
||||
'use strict';
|
||||
|
||||
var has = Object.prototype.hasOwnProperty
|
||||
, undef;
|
||||
|
||||
/**
 * Decode a URI encoded string, treating `+` as a space.
 *
 * @param {String} input The URI encoded string.
 * @returns {String|Null} The decoded string, or `null` when decoding fails.
 * @api private
 */
function decode(input) {
  var decoded = null;

  try {
    decoded = decodeURIComponent(input.replace(/\+/g, ' '));
  } catch (e) {
    // Malformed percent-escape sequence: signal failure with `null`.
  }

  return decoded;
}
|
||||
|
||||
/**
 * Attempts to encode a given input.
 *
 * @param {String} input The string that needs to be encoded.
 * @returns {String|Null} The encoded string, or `null` when encoding fails.
 * @api private
 */
function encode(input) {
  var encoded = null;

  try {
    encoded = encodeURIComponent(input);
  } catch (e) {
    // Unpaired surrogate or similar: signal failure with `null`.
  }

  return encoded;
}
|
||||
|
||||
/**
 * Simple query string parser.
 *
 * @param {String} query The query string that needs to be parsed.
 * @returns {Object} Map of decoded key/value pairs.
 * @api public
 */
function querystring(query) {
  var pattern = /([^=?#&]+)=?([^&]*)/g;
  var result = {};
  var match;

  while ((match = pattern.exec(query))) {
    var key = decode(match[1]);
    var value = decode(match[2]);

    //
    // Drop pairs whose key or value failed to decode, and never assign a
    // key that already exists on the result — including inherited names
    // such as `toString` or `__proto__` — so malicious query strings
    // cannot override built-in methods or pollute the prototype.
    //
    if (key === null || value === null || key in result) continue;
    result[key] = value;
  }

  return result;
}
|
||||
|
||||
/**
 * Transform an object into a query string.
 *
 * @param {Object} obj Object that should be transformed.
 * @param {String} prefix Optional prefix (defaults to ''; any non-string
 *     value selects '?').
 * @returns {String} The query string, or '' when there was nothing to encode.
 * @api public
 */
function querystringify(obj, prefix) {
  prefix = prefix || '';

  var pairs = [];

  //
  // Anything other than a string prefix is normalized to '?'.
  //
  if ('string' !== typeof prefix) prefix = '?';

  for (var key in obj) {
    if (!has.call(obj, key)) continue;

    var value = obj[key];

    //
    // Encode falsy non-values (null, undefined, NaN) as an empty string
    // instead of the literal strings "null"/"undefined"/"NaN".
    //
    if (!value && (value === null || value === undefined || isNaN(value))) {
      value = '';
    }

    var encodedKey = encode(key);
    var encodedValue = encode(value);

    //
    // If either side failed to encode, skip the pair rather than emitting
    // an invalid fragment.
    //
    if (encodedKey === null || encodedValue === null) continue;
    pairs.push(encodedKey + '=' + encodedValue);
  }

  return pairs.length ? prefix + pairs.join('&') : '';
}
|
||||
|
||||
//
// Expose the module: `stringify` serializes an object into a query string
// and `parse` is its inverse.
//
exports.stringify = querystringify;
exports.parse = querystring;
|
||||
|
||||
},{}],3:[function(require,module,exports){
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Check if we're required to add a port number.
|
||||
*
|
||||
* @see https://url.spec.whatwg.org/#default-port
|
||||
* @param {Number|String} port Port number we need to check
|
||||
* @param {String} protocol Protocol we need to check against.
|
||||
* @returns {Boolean} Is it a default port for the given protocol
|
||||
* @api private
|
||||
*/
|
||||
module.exports = function required(port, protocol) {
|
||||
protocol = protocol.split(':')[0];
|
||||
port = +port;
|
||||
|
||||
if (!port) return false;
|
||||
|
||||
switch (protocol) {
|
||||
case 'http':
|
||||
case 'ws':
|
||||
return port !== 80;
|
||||
|
||||
case 'https':
|
||||
case 'wss':
|
||||
return port !== 443;
|
||||
|
||||
case 'ftp':
|
||||
return port !== 21;
|
||||
|
||||
case 'gopher':
|
||||
return port !== 70;
|
||||
|
||||
case 'file':
|
||||
return false;
|
||||
}
|
||||
|
||||
return port !== 0;
|
||||
};
|
||||
|
||||
},{}]},{},[1])(1)
|
||||
});
|
||||
1
node_modules/url-parse/dist/url-parse.min.js
generated
vendored
Normal file
1
node_modules/url-parse/dist/url-parse.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/url-parse/dist/url-parse.min.js.map
generated
vendored
Normal file
1
node_modules/url-parse/dist/url-parse.min.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
589
node_modules/url-parse/index.js
generated
vendored
Normal file
589
node_modules/url-parse/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,589 @@
|
|||
'use strict';
|
||||
|
||||
var required = require('requires-port')
|
||||
, qs = require('querystringify')
|
||||
, controlOrWhitespace = /^[\x00-\x20\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]+/
|
||||
, CRHTLF = /[\n\r\t]/g
|
||||
, slashes = /^[A-Za-z][A-Za-z0-9+-.]*:\/\//
|
||||
, port = /:\d+$/
|
||||
, protocolre = /^([a-z][a-z0-9.+-]*:)?(\/\/)?([\\/]+)?([\S\s]*)/i
|
||||
, windowsDriveLetter = /^[a-zA-Z]:/;
|
||||
|
||||
/**
 * Remove control characters and whitespace from the beginning of a string.
 *
 * @param {Object|String} str String to trim.
 * @returns {String} A new string representing `str` stripped of control
 *     characters and whitespace from its beginning.
 * @public
 */
function trimLeft(str) {
  var input = str ? str : '';

  //
  // Strips leading ASCII controls and space plus the Unicode whitespace
  // family: NBSP, ogham space mark, the en-quad..hair-space range, line
  // and paragraph separators, narrow NBSP, mathematical space,
  // ideographic space and the BOM.
  //
  return input
    .toString()
    .replace(/^[\x00-\x20\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff]+/, '');
}
|
||||
|
||||
/**
 * These are the parse rules for the URL parser, it informs the parser
 * about:
 *
 * 0. The char it Needs to parse, if it's a string it should be done using
 *    indexOf, RegExp using exec and NaN means set as current value.
 * 1. The property we should set when parsing this value.
 * 2. Indication if it's backwards or forward parsing, when set as number it's
 *    the value of extra chars that should be split off.
 * 3. Inherit from location if non existing in the parser.
 * 4. `toLowerCase` the resulting value.
 *
 * Note: the order of the rules is significant — each rule consumes part of
 * the address before the next one runs.
 */
var rules = [
  ['#', 'hash'],                    // Extract from the back.
  ['?', 'query'],                   // Extract from the back.
  function sanitize(address, url) { // Sanitize what is left of the address
    // For special protocols, backslashes are treated as forward slashes.
    return isSpecial(url.protocol) ? address.replace(/\\/g, '/') : address;
  },
  ['/', 'pathname'],                // Extract from the back.
  ['@', 'auth', 1],                 // Extract from the front.
  [NaN, 'host', undefined, 1, 1],   // Set left over value.
  [/:(\d*)$/, 'port', undefined, 1],// RegExp the back.
  [NaN, 'hostname', undefined, 1, 1]// Set left over.
];
|
||||
|
||||
/**
 * These properties should not be copied or inherited from. This is only needed
 * for all non blob URL's as a blob URL does not include a hash, only the
 * origin.
 *
 * Consumed by `lolcation` when building the default base location.
 *
 * @type {Object}
 * @private
 */
var ignore = { hash: 1, query: 1 };
|
||||
|
||||
/**
 * The location object differs when your code is loaded through a normal page,
 * Worker or through a worker using a blob. And with the blobble begins the
 * trouble as the location object will contain the URL of the blob, not the
 * location of the page where our code is loaded in. The actual origin is
 * encoded in the `pathname` so we can thankfully generate a good "default"
 * location from it so we can generate proper relative URL's again.
 *
 * @param {Object|String} loc Optional default location object.
 * @returns {Object} lolcation object.
 * @public
 */
function lolcation(loc) {
  var globalVar;

  // Pick whichever global object this environment provides.
  if (typeof window !== 'undefined') globalVar = window;
  else if (typeof global !== 'undefined') globalVar = global;
  else if (typeof self !== 'undefined') globalVar = self;
  else globalVar = {};

  var location = globalVar.location || {};
  loc = loc || location;

  var finaldestination = {}
    , type = typeof loc
    , key;

  if ('blob:' === loc.protocol) {
    // The real origin of a blob: URL is encoded in its pathname.
    finaldestination = new Url(unescape(loc.pathname), {});
  } else if ('string' === type) {
    finaldestination = new Url(loc, {});
    // `hash` and `query` must never be inherited from the base location.
    for (key in ignore) delete finaldestination[key];
  } else if ('object' === type) {
    // Shallow-copy the location-like object, minus the ignored keys.
    for (key in loc) {
      if (key in ignore) continue;
      finaldestination[key] = loc[key];
    }

    if (finaldestination.slashes === undefined) {
      finaldestination.slashes = slashes.test(loc.href);
    }
  }

  return finaldestination;
}
|
||||
|
||||
/**
 * Check whether a protocol scheme is special.
 *
 * @param {String} scheme The protocol scheme of the URL.
 * @return {Boolean} `true` if the protocol scheme is special, else `false`.
 * @private
 */
function isSpecial(scheme) {
  //
  // The WHATWG URL specification's "special" schemes.
  //
  switch (scheme) {
    case 'file:':
    case 'ftp:':
    case 'http:':
    case 'https:':
    case 'ws:':
    case 'wss:':
      return true;
    default:
      return false;
  }
}
|
||||
|
||||
/**
 * @typedef ProtocolExtract
 * @type Object
 * @property {String} protocol Protocol matched in the URL, in lowercase.
 * @property {Boolean} slashes `true` if protocol is followed by "//", else `false`.
 * @property {String} rest Rest of the URL that is not part of the protocol.
 */

/**
 * Extract protocol information from a URL with/without double slash ("//").
 *
 * @param {String} address URL we want to extract from.
 * @param {Object} location Base location; its protocol is consulted for
 *     protocol-relative input.
 * @return {ProtocolExtract} Extracted information.
 * @private
 */
function extractProtocol(address, location) {
  // Normalize exactly like `Url` does so both see the same input.
  address = trimLeft(address);
  address = address.replace(CRHTLF, '');
  location = location || {};

  var match = protocolre.exec(address);
  var protocol = match[1] ? match[1].toLowerCase() : '';
  var forwardSlashes = !!match[2];  // literal "//" right after the scheme
  var otherSlashes = !!match[3];    // additional '/' or '\' characters
  var slashesCount = 0;
  var rest;

  //
  // Rebuild the remainder and count how many slash characters followed the
  // scheme; the count decides below how the remainder is interpreted.
  //
  if (forwardSlashes) {
    if (otherSlashes) {
      rest = match[2] + match[3] + match[4];
      slashesCount = match[2].length + match[3].length;
    } else {
      rest = match[2] + match[4];
      slashesCount = match[2].length;
    }
  } else {
    if (otherSlashes) {
      rest = match[3] + match[4];
      slashesCount = match[3].length;
    } else {
      rest = match[4]
    }
  }

  if (protocol === 'file:') {
    if (slashesCount >= 2) {
      rest = rest.slice(2);
    }
  } else if (isSpecial(protocol)) {
    // Special schemes: discard however many slashes followed the scheme.
    rest = match[4];
  } else if (protocol) {
    if (forwardSlashes) {
      rest = rest.slice(2);
    }
  } else if (slashesCount >= 2 && isSpecial(location.protocol)) {
    // Protocol-relative URL against a special base location.
    rest = match[4];
  }

  return {
    protocol: protocol,
    slashes: forwardSlashes || isSpecial(protocol),
    slashesCount: slashesCount,
    rest: rest
  };
}
|
||||
|
||||
/**
 * Resolve a relative pathname against a base pathname, collapsing any
 * `.` and `..` segments in the combined path.
 *
 * @param {String} relative Pathname of the relative URL.
 * @param {String} base Pathname of the base URL.
 * @return {String} Resolved pathname.
 * @private
 */
function resolve(relative, base) {
  if (relative === '') return base;

  //
  // Drop the last segment of the base (its "file" part), append the
  // relative segments, then walk the combined list backwards resolving
  // dot segments.
  //
  var segments = (base || '/').split('/').slice(0, -1).concat(relative.split('/'));
  var idx = segments.length;
  var lastSegment = segments[idx - 1];
  var prependEmpty = false;
  var pending = 0;

  while (idx--) {
    var segment = segments[idx];

    if (segment === '.') {
      segments.splice(idx, 1);
    } else if (segment === '..') {
      segments.splice(idx, 1);
      pending++;
    } else if (pending) {
      if (idx === 0) prependEmpty = true;
      segments.splice(idx, 1);
      pending--;
    }
  }

  if (prependEmpty) segments.unshift('');
  if (lastSegment === '.' || lastSegment === '..') segments.push('');

  return segments.join('/');
}
|
||||
|
||||
/**
 * The actual URL instance. Instead of returning an object we've opted-in to
 * create an actual constructor as it's much more memory efficient and
 * faster and it pleases my OCD.
 *
 * It is worth noting that we should not use `URL` as class name to prevent
 * clashes with the global URL instance that got introduced in browsers.
 *
 * @constructor
 * @param {String} address URL we want to parse.
 * @param {Object|String} [location] Location defaults for relative paths.
 * @param {Boolean|Function} [parser] Parser for the query string.
 * @private
 */
function Url(address, location, parser) {
  //
  // Normalize the input before parsing: strip leading control characters
  // and whitespace, and remove CR / LF / TAB anywhere in the address.
  //
  address = trimLeft(address);
  address = address.replace(CRHTLF, '');

  // Allow calling without `new`.
  if (!(this instanceof Url)) {
    return new Url(address, location, parser);
  }

  var relative, extracted, parse, instruction, index, key
    , instructions = rules.slice()
    , type = typeof location
    , url = this
    , i = 0;

  //
  // The following if statements allows this module to have compatibility with
  // 2 different APIs:
  //
  // 1. Node.js's `url.parse` api which accepts a URL, boolean as arguments
  // where the boolean indicates that the query string should also be parsed.
  //
  // 2. The `URL` interface of the browser which accepts a URL, object as
  // arguments. The supplied object will be used as default values / fall-back
  // for relative paths.
  //
  if ('object' !== type && 'string' !== type) {
    parser = location;
    location = null;
  }

  // A truthy non-function `parser` (e.g. `true`) selects the built-in parser.
  if (parser && 'function' !== typeof parser) parser = qs.parse;

  location = lolcation(location);

  //
  // Extract protocol information before running the instructions.
  //
  extracted = extractProtocol(address || '', location);
  relative = !extracted.protocol && !extracted.slashes;
  url.slashes = extracted.slashes || relative && location.slashes;
  url.protocol = extracted.protocol || location.protocol || '';
  address = extracted.rest;

  //
  // When the authority component is absent the URL starts with a path
  // component: swap the pathname rule so everything left is the path.
  //
  if (
    extracted.protocol === 'file:' && (
      extracted.slashesCount !== 2 || windowsDriveLetter.test(address)) ||
    (!extracted.slashes &&
      (extracted.protocol ||
        extracted.slashesCount < 2 ||
        !isSpecial(url.protocol)))
  ) {
    instructions[3] = [/(.*)/, 'pathname'];
  }

  for (; i < instructions.length; i++) {
    instruction = instructions[i];

    // Function rules transform the remaining address (see `rules`).
    if (typeof instruction === 'function') {
      address = instruction(address, url);
      continue;
    }

    parse = instruction[0];
    key = instruction[1];

    // `parse !== parse` is a NaN check: NaN rules consume what is left.
    if (parse !== parse) {
      url[key] = address;
    } else if ('string' === typeof parse) {
      // `@` is matched from the back so the auth part itself may contain `@`.
      index = parse === '@'
        ? address.lastIndexOf(parse)
        : address.indexOf(parse);

      if (~index) {
        if ('number' === typeof instruction[2]) {
          // Forward parsing: keep the front, drop the delimiter.
          url[key] = address.slice(0, index);
          address = address.slice(index + instruction[2]);
        } else {
          // Backward parsing: keep the back, including the delimiter.
          url[key] = address.slice(index);
          address = address.slice(0, index);
        }
      }
    } else if ((index = parse.exec(address))) {
      url[key] = index[1];
      address = address.slice(0, index.index);
    }

    // Inherit missing parts from `location` when parsing a relative URL.
    url[key] = url[key] || (
      relative && instruction[3] ? location[key] || '' : ''
    );

    //
    // Hostname, host and protocol should be lowercased so they can be used to
    // create a proper `origin`.
    //
    if (instruction[4]) url[key] = url[key].toLowerCase();
  }

  //
  // Also parse the supplied query string in to an object. If we're supplied
  // with a custom parser as function use that instead of the default built-in
  // parser.
  //
  if (parser) url.query = parser(url.query);

  //
  // If the URL is relative, resolve the pathname against the base URL.
  //
  if (
    relative
    && location.slashes
    && url.pathname.charAt(0) !== '/'
    && (url.pathname !== '' || location.pathname !== '')
  ) {
    url.pathname = resolve(url.pathname, location.pathname);
  }

  //
  // Default to a / for pathname if none exists. This normalizes the URL
  // to always have a /
  //
  if (url.pathname.charAt(0) !== '/' && isSpecial(url.protocol)) {
    url.pathname = '/' + url.pathname;
  }

  //
  // We should not add port numbers if they are already the default port number
  // for a given protocol. As the host also contains the port number we're going
  // to override it with the hostname which contains no port number.
  //
  if (!required(url.port, url.protocol)) {
    url.host = url.hostname;
    url.port = '';
  }

  //
  // Parse down the `auth` for the username and password. Both halves are
  // round-tripped through decode/encode so they end up consistently
  // percent-encoded.
  //
  url.username = url.password = '';

  if (url.auth) {
    index = url.auth.indexOf(':');

    if (~index) {
      url.username = url.auth.slice(0, index);
      url.username = encodeURIComponent(decodeURIComponent(url.username));

      url.password = url.auth.slice(index + 1);
      url.password = encodeURIComponent(decodeURIComponent(url.password))
    } else {
      url.username = encodeURIComponent(decodeURIComponent(url.auth));
    }

    url.auth = url.password ? url.username +':'+ url.password : url.username;
  }

  // `file:` URLs and non-special schemes get an opaque origin ('null').
  url.origin = url.protocol !== 'file:' && isSpecial(url.protocol) && url.host
    ? url.protocol +'//'+ url.host
    : 'null';

  //
  // The href is just the compiled result.
  //
  url.href = url.toString();
}
|
||||
|
||||
/**
 * This is a convenience method for changing properties in the URL instance to
 * ensure that they all propagate correctly.
 *
 * @param {String} part Property we need to adjust.
 * @param {Mixed} value The newly assigned value.
 * @param {Boolean|Function} fn When setting the query, it will be the function
 *                              used to parse the query.
 *                              When setting the protocol, double slash will be
 *                              removed from the final url if it is true.
 * @returns {URL} URL instance for chaining.
 * @public
 */
function set(part, value, fn) {
  var url = this;

  switch (part) {
    case 'query':
      // String values are parsed with `fn` (or the built-in parser).
      if ('string' === typeof value && value.length) {
        value = (fn || qs.parse)(value);
      }

      url[part] = value;
      break;

    case 'port':
      url[part] = value;

      // Default ports are dropped; otherwise `host` is kept in sync.
      if (!required(value, url.protocol)) {
        url.host = url.hostname;
        url[part] = '';
      } else if (value) {
        url.host = url.hostname +':'+ value;
      }

      break;

    case 'hostname':
      url[part] = value;

      // Keep `host` (hostname[:port]) in sync with the new hostname.
      if (url.port) value += ':'+ url.port;
      url.host = value;
      break;

    case 'host':
      url[part] = value;

      // Split a trailing `:port` back into `hostname` and `port`.
      if (port.test(value)) {
        value = value.split(':');
        url.port = value.pop();
        url.hostname = value.join(':');
      } else {
        url.hostname = value;
        url.port = '';
      }

      break;

    case 'protocol':
      url.protocol = value.toLowerCase();
      url.slashes = !fn;
      break;

    case 'pathname':
    case 'hash':
      // Ensure the canonical leading '/' (pathname) or '#' (hash).
      if (value) {
        var char = part === 'pathname' ? '/' : '#';
        url[part] = value.charAt(0) !== char ? char + value : value;
      } else {
        url[part] = value;
      }
      break;

    case 'username':
    case 'password':
      url[part] = encodeURIComponent(value);
      break;

    case 'auth':
      // Split `user:pass` and percent-encode both halves consistently.
      var index = value.indexOf(':');

      if (~index) {
        url.username = value.slice(0, index);
        url.username = encodeURIComponent(decodeURIComponent(url.username));

        url.password = value.slice(index + 1);
        url.password = encodeURIComponent(decodeURIComponent(url.password));
      } else {
        url.username = encodeURIComponent(decodeURIComponent(value));
      }
  }

  // Re-apply the lowercase rules (hostname, host, protocol) after any change.
  for (var i = 0; i < rules.length; i++) {
    var ins = rules[i];

    if (ins[4]) url[ins[1]] = url[ins[1]].toLowerCase();
  }

  // Recompute the derived properties so the instance stays consistent.
  url.auth = url.password ? url.username +':'+ url.password : url.username;

  url.origin = url.protocol !== 'file:' && isSpecial(url.protocol) && url.host
    ? url.protocol +'//'+ url.host
    : 'null';

  url.href = url.toString();

  return url;
}
|
||||
|
||||
/**
 * Transform the properties back in to a valid and full URL string.
 *
 * @param {Function} stringify Optional query stringify function.
 * @returns {String} Compiled version of the URL.
 * @public
 */
function toString(stringify) {
  if (!stringify || 'function' !== typeof stringify) stringify = qs.stringify;

  var query
    , url = this
    , host = url.host
    , protocol = url.protocol;

  // Make sure the protocol, when present, ends with a single ':'.
  if (protocol && protocol.charAt(protocol.length - 1) !== ':') protocol += ':';

  var result =
    protocol +
    ((url.protocol && url.slashes) || isSpecial(url.protocol) ? '//' : '');

  if (url.username) {
    result += url.username;
    if (url.password) result += ':'+ url.password;
    result += '@';
  } else if (url.password) {
    result += ':'+ url.password;
    result += '@';
  } else if (
    url.protocol !== 'file:' &&
    isSpecial(url.protocol) &&
    !host &&
    url.pathname !== '/'
  ) {
    //
    // Add back the empty userinfo, otherwise the original invalid URL
    // might be transformed into a valid one with `url.pathname` as host.
    //
    result += '@';
  }

  //
  // Trailing colon is removed from `url.host` when it is parsed. If it still
  // ends with a colon, then add back the trailing colon that was removed. This
  // prevents an invalid URL from being transformed into a valid one.
  //
  if (host[host.length - 1] === ':' || (port.test(url.hostname) && !url.port)) {
    host += ':';
  }

  result += host + url.pathname;

  // Stringify object queries; string queries are used as-is, prefixing a '?'
  // only when it is missing.
  query = 'object' === typeof url.query ? stringify(url.query) : url.query;
  if (query) result += '?' !== query.charAt(0) ? '?'+ query : query;

  if (url.hash) result += url.hash;

  return result;
}
|
||||
|
||||
Url.prototype = { set: set, toString: toString };
|
||||
|
||||
//
|
||||
// Expose the URL parser and some additional properties that might be useful for
|
||||
// others or testing.
|
||||
//
|
||||
Url.extractProtocol = extractProtocol;
|
||||
Url.location = lolcation;
|
||||
Url.trimLeft = trimLeft;
|
||||
Url.qs = qs;
|
||||
|
||||
module.exports = Url;
|
||||
49
node_modules/url-parse/package.json
generated
vendored
Normal file
49
node_modules/url-parse/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
{
|
||||
"name": "url-parse",
|
||||
"version": "1.5.10",
|
||||
"description": "Small footprint URL parser that works seamlessly across Node.js and browser environments",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"browserify": "rm -rf dist && mkdir -p dist && browserify index.js -s URLParse -o dist/url-parse.js",
|
||||
"minify": "uglifyjs dist/url-parse.js --source-map -cm -o dist/url-parse.min.js",
|
||||
"test": "c8 --reporter=lcov --reporter=text mocha test/test.js",
|
||||
"test-browser": "node test/browser.js",
|
||||
"prepublishOnly": "npm run browserify && npm run minify",
|
||||
"watch": "mocha --watch test/test.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/unshiftio/url-parse.git"
|
||||
},
|
||||
"keywords": [
|
||||
"URL",
|
||||
"parser",
|
||||
"uri",
|
||||
"url",
|
||||
"parse",
|
||||
"query",
|
||||
"string",
|
||||
"querystring",
|
||||
"stringify"
|
||||
],
|
||||
"author": "Arnout Kazemier",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"assume": "^2.2.0",
|
||||
"browserify": "^17.0.0",
|
||||
"c8": "^7.3.1",
|
||||
"mocha": "^9.0.3",
|
||||
"pre-commit": "^1.2.2",
|
||||
"sauce-browsers": "^2.0.0",
|
||||
"sauce-test": "^1.3.3",
|
||||
"uglify-js": "^3.5.7"
|
||||
}
|
||||
}
|
||||
67
package-lock.json
generated
67
package-lock.json
generated
|
|
@ -292,19 +292,6 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http/node_modules/tough-cookie": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
|
||||
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
|
||||
"dependencies": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http/node_modules/tslib": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
|
||||
|
|
@ -387,19 +374,18 @@
|
|||
"integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.2.tgz",
|
||||
"integrity": "sha512-0/8rOxAoR9M3qKUdbGOIYtHtQkm4m5jdoDNdxTU0DkOr84KwyAdJuW/RfjJinGyig4h73DNF0rdCl6XowgCYcg==",
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz",
|
||||
"integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==",
|
||||
"dependencies": {
|
||||
"@azure/core-auth": "^1.1.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tough-cookie": "^3.0.1",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
"xml2js": "^0.4.19"
|
||||
"xml2js": "^0.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js/node_modules/uuid": {
|
||||
|
|
@ -3737,14 +3723,6 @@
|
|||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/ip-regex": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz",
|
||||
"integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/irregular-plurals": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/irregular-plurals/-/irregular-plurals-3.3.0.tgz",
|
||||
|
|
@ -5062,6 +5040,11 @@
|
|||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/querystringify": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
|
||||
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
|
|
@ -5157,6 +5140,11 @@
|
|||
"node": ">=0.10.5"
|
||||
}
|
||||
},
|
||||
"node_modules/requires-port": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
|
||||
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
|
||||
},
|
||||
"node_modules/resolve": {
|
||||
"version": "1.22.1",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
|
||||
|
|
@ -5772,13 +5760,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/tough-cookie": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
|
||||
"integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==",
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
|
||||
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
|
||||
"dependencies": {
|
||||
"ip-regex": "^2.1.0",
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
|
|
@ -5907,8 +5896,9 @@
|
|||
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
|
||||
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
|
|
@ -5948,6 +5938,15 @@
|
|||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/url-parse": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.10.4",
|
||||
"integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue