Upgrade Ava to v4
parent 9a40cc5274
commit ce89f1b611
1153 changed files with 27264 additions and 95308 deletions

node_modules/cbor/LICENSE.md (generated, vendored, normal file, +20)

@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2021 Joe Hildebrand

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

node_modules/cbor/README.md (generated, vendored, normal file, +272)

@@ -0,0 +1,272 @@
# cbor

Encode and parse data in the Concise Binary Object Representation (CBOR) data format ([RFC8949](https://www.rfc-editor.org/rfc/rfc8949.html)).

## Supported Node.js versions

This project now only supports versions of Node that the Node team is
[currently supporting](https://github.com/nodejs/Release#release-schedule).
Ava's [support
statement](https://github.com/avajs/ava/blob/main/docs/support-statement.md)
is what we will be using as well. Currently, that means Node `10`+ is
required. If you need to support an older version of Node (back to version
6), use cbor version 5.2.x, which will get nothing but security updates from
here on out.

## Installation:

```bash
$ npm install --save cbor
```

**NOTE**
If you are going to use this on the web, use [cbor-web](../cbor-web) instead.

If you need support for encoding and decoding BigDecimal fractions (tag 4) or
BigFloats (tag 5), please see [cbor-bigdecimal](../cbor-bigdecimal).

## Documentation:

See the full API [documentation](http://hildjj.github.io/node-cbor/).

For a command-line interface, see [cbor-cli](../cbor-cli).

Example:
```js
const cbor = require('cbor')
const assert = require('assert')

let encoded = cbor.encode(true) // Returns <Buffer f5>
cbor.decodeFirst(encoded, (error, obj) => {
  // If there was an error, error != null
  // obj is the unpacked object
  assert.ok(obj === true)
})

// Use integers as keys?
const m = new Map()
m.set(1, 2)
encoded = cbor.encode(m) // <Buffer a1 01 02>
```
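
Because plain JavaScript objects can only have string keys, the integer-keyed map above round-trips as a `Map` rather than an object. A minimal sketch of reading it back, assuming the default decoder options:

```js
const cbor = require('cbor')

const m = new Map()
m.set(1, 2)
const encoded = cbor.encode(m) // <Buffer a1 01 02>

// With a non-string key, the decoder produces a Map rather than an Object.
const decoded = cbor.decodeFirstSync(encoded)
console.log(decoded instanceof Map) // true
console.log(decoded.get(1))         // 2
```
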
Allows streaming as well:

```js
const cbor = require('cbor')
const fs = require('fs')

const d = new cbor.Decoder()
d.on('data', obj => {
  console.log(obj)
})

const s = fs.createReadStream('foo')
s.pipe(d)

const d2 = new cbor.Decoder({input: '00', encoding: 'hex'})
d2.on('data', obj => {
  console.log(obj)
})
```

There is also support for synchronous decodes:

```js
try {
  console.log(cbor.decodeFirstSync('02')) // 2
  console.log(cbor.decodeAllSync('0202')) // [2, 2]
} catch (e) {
  // Throws on invalid input
}
```

The sync encoding and decoding are exported as a
[leveldb encoding](https://github.com/Level/levelup#custom_encodings), as
`cbor.leveldb`.

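A minimal sketch of wiring that codec into a LevelDB stack; the `level` package and the `keyEncoding`/`valueEncoding` option names follow the example in `lib/cbor.js` below, so treat the exact setup as an assumption rather than a pinned API:

```js
const level = require('level')
const cbor = require('cbor')

async function putget() {
  // Both keys and values go through cbor.leveldb, so Maps, BigInts, etc.
  // survive the round trip instead of being flattened to JSON.
  const db = level('./db', {
    keyEncoding: cbor.leveldb,
    valueEncoding: cbor.leveldb,
  })

  await db.put({a: 1}, new Map([[1, 2n]]))
  const val = await db.get({a: 1})
  console.log(val) // Map(1) { 1 => 2n }
}

putget().catch(console.error)
```
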
## highWaterMark

The synchronous routines for encoding and decoding will have problems with
objects that are larger than 16kB, which is the default buffer size for Node
streams. There are a few ways to fix this:

1) pass in a `highWaterMark` option with the value of the largest buffer size you think you will need:

```js
cbor.encodeOne(new ArrayBuffer(40000), {highWaterMark: 65535})
```

2) use stream mode. Catch the `data`, `finish`, and `error` events. Make sure to call `end()` when you're done.

```js
const enc = new cbor.Encoder()
enc.on('data', buf => /* Send the data somewhere */ null)
enc.on('error', console.error)
enc.on('finish', () => /* Tell the consumer we are finished */ null)

enc.end(['foo', 1, false])
```

3) use `encodeAsync()`, which uses approach 2 internally to return a memory-inefficient promise for a Buffer (see the sketch below).

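A minimal sketch of that third option; the value being encoded is arbitrary and only there to show the shape of the call:

```js
const cbor = require('cbor')

async function encodeBig() {
  // encodeAsync() runs the streaming encoder internally, then buffers the
  // whole result before resolving, which is why it is memory-inefficient.
  const buf = await cbor.encodeAsync(new ArrayBuffer(40000))
  return buf // Buffer of CBOR bytes
}

encodeBig().then(buf => console.log(buf.length))
```
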
## Supported types

The following types are supported for encoding:

* boolean
* number (including -0, NaN, and ±Infinity)
* string
* Array, Set (encoded as Array)
* Object (including null), Map
* undefined
* Buffer
* Date
* RegExp
* URL
* TypedArrays, ArrayBuffer, DataView
* Map, Set
* BigInt

Decoding supports the above types, including the following CBOR tag numbers:

| Tag | Generated Type      |
|-----|---------------------|
| 0   | Date                |
| 1   | Date                |
| 2   | BigInt              |
| 3   | BigInt              |
| 21  | Tagged, with toJSON |
| 22  | Tagged, with toJSON |
| 23  | Tagged, with toJSON |
| 32  | URL                 |
| 33  | Tagged              |
| 34  | Tagged              |
| 35  | RegExp              |
| 64  | Uint8Array          |
| 65  | Uint16Array         |
| 66  | Uint32Array         |
| 67  | BigUint64Array      |
| 68  | Uint8ClampedArray   |
| 69  | Uint16Array         |
| 70  | Uint32Array         |
| 71  | BigUint64Array      |
| 72  | Int8Array           |
| 73  | Int16Array          |
| 74  | Int32Array          |
| 75  | BigInt64Array       |
| 77  | Int16Array          |
| 78  | Int32Array          |
| 79  | BigInt64Array       |
| 81  | Float32Array        |
| 82  | Float64Array        |
| 85  | Float32Array        |
| 86  | Float64Array        |
| 258 | Set                 |

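As a rough illustration of how those mappings compose, here is a small round-trip sketch; the output comments assume the default encoder and decoder options:

```js
const cbor = require('cbor')

const input = {
  when: new Date(),                     // encoded as an epoch date tag
  big: 123456789012345678901234567890n, // positive bignum (tag 2)
  bytes: new Uint8Array([1, 2, 3]),     // typed array (tag 64)
  letters: new Set(['a', 'b']),         // set (tag 258)
}

const output = cbor.decodeFirstSync(cbor.encode(input))
console.log(output.when instanceof Date)        // true
console.log(typeof output.big)                  // 'bigint'
console.log(output.bytes instanceof Uint8Array) // true
console.log(output.letters instanceof Set)      // true
```
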
## Adding new Encoders

There are several ways to add a new encoder:

### `encodeCBOR` method

This is the easiest approach, if you can modify the class being encoded. Add an
`encodeCBOR` method to your class, which takes a single parameter of the encoder
currently being used. Your method should return `true` on success, else `false`.
Your method may call `encoder.push(buffer)` or `encoder.pushAny(any)` as needed.

For example:

```js
class Foo {
  constructor() {
    this.one = 1
    this.two = 2
  }

  encodeCBOR(encoder) {
    const tagged = new Tagged(64000, [this.one, this.two])
    return encoder.pushAny(tagged)
  }
}
```

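Reusing the `Foo` class above (and assuming `Tagged` refers to `cbor.Tagged`), a quick check of what the encoder emits; the byte values in the comment are worked out from the CBOR header rules rather than quoted from the package docs:

```js
const cbor = require('cbor')

const bytes = cbor.encode(new Foo())
// Tag 64000 needs a two-byte argument (0xd9 0xfa 0x00), followed by the
// two-element array [1, 2] (0x82 0x01 0x02).
console.log(bytes.toString('hex')) // 'd9fa00820102'
```
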
You can also modify an existing type by monkey-patching an `encodeCBOR` function
onto its prototype, but this isn't recommended.

### `addSemanticType`

Sometimes, you want to support an existing type without modification to that
type. In this case, call `addSemanticType(type, encodeFunction)` on an existing
`Encoder` instance. The `encodeFunction` takes an encoder and an object to
encode, for example:

```js
class Bar {
  constructor() {
    this.three = 3
  }
}
const enc = new Encoder()
enc.addSemanticType(Bar, (encoder, b) => {
  encoder.pushAny(b.three)
})
```

## Adding new decoders

Most of the time, you will want to add support for decoding a new tag type. If
the Decoder class encounters a tag it doesn't support, it will generate a `Tagged`
instance that you can handle or ignore as needed. To have a specific type
generated instead, pass a `tags` option to the `Decoder`'s constructor, consisting
of an object with tag number keys and function values. The function will be
passed the decoded value associated with the tag, and should return the decoded
value. For the `Foo` example above, this might look like:

```js
const d = new Decoder({
  tags: {
    64000: val => {
      // Check val to make sure it's an Array as expected, etc.
      const foo = new Foo()
      ;[foo.one, foo.two] = val
      return foo
    },
  },
})
```

You can also replace the default decoders by passing in an appropriate tag
function. For example:

```js
cbor.decodeFirstSync(input, {
  tags: {
    // Replace the Tag 0 (RFC3339 Date/Time string) decoder.
    // See https://tc39.es/proposal-temporal/docs/ for the upcoming
    // Temporal built-in, which supports nanosecond time:
    0: x => Temporal.Instant.from(x),
  },
})
```

Developers
----------

The tests for this package use a set of test vectors from RFC 8949 appendix A
by importing a machine readable version of them from
https://github.com/cbor/test-vectors. For these tests to work, you will need
to use the command `git submodule update --init` after cloning or pulling this
code. See https://gist.github.com/gitaarik/8735255#file-git_submodules-md
for more information.

Get a list of build steps with `npm run`. I use `npm run dev`, which rebuilds,
runs tests, and refreshes a browser window with coverage metrics every time I
save a `.js` file. If you don't want to run the fuzz tests every time, set
a `NO_GARBAGE` environment variable:

```
env NO_GARBAGE=1 npm run dev
```

[Tests](https://github.com/hildjj/node-cbor/actions?query=workflow%3ATests)
[Coverage](https://coveralls.io/r/hildjj/node-cbor?branch=main)

node_modules/cbor/lib/cbor.js (generated, vendored, normal file, +102)

@@ -0,0 +1,102 @@
'use strict'

exports.Commented = require('./commented')
exports.Diagnose = require('./diagnose')
exports.Decoder = require('./decoder')
exports.Encoder = require('./encoder')
exports.Simple = require('./simple')
exports.Tagged = require('./tagged')
exports.Map = require('./map')

/**
 * Convenience name for {@linkcode Commented.comment}.
 */
exports.comment = exports.Commented.comment

/**
 * Convenience name for {@linkcode Decoder.decodeAll}.
 */
exports.decodeAll = exports.Decoder.decodeAll

/**
 * Convenience name for {@linkcode Decoder.decodeFirst}.
 */
exports.decodeFirst = exports.Decoder.decodeFirst

/**
 * Convenience name for {@linkcode Decoder.decodeAllSync}.
 */
exports.decodeAllSync = exports.Decoder.decodeAllSync

/**
 * Convenience name for {@linkcode Decoder.decodeFirstSync}.
 */
exports.decodeFirstSync = exports.Decoder.decodeFirstSync

/**
 * Convenience name for {@linkcode Diagnose.diagnose}.
 */
exports.diagnose = exports.Diagnose.diagnose

/**
 * Convenience name for {@linkcode Encoder.encode}.
 */
exports.encode = exports.Encoder.encode

/**
 * Convenience name for {@linkcode Encoder.encodeCanonical}.
 */
exports.encodeCanonical = exports.Encoder.encodeCanonical

/**
 * Convenience name for {@linkcode Encoder.encodeOne}.
 */
exports.encodeOne = exports.Encoder.encodeOne

/**
 * Convenience name for {@linkcode Encoder.encodeAsync}.
 */
exports.encodeAsync = exports.Encoder.encodeAsync

/**
 * Convenience name for {@linkcode Decoder.decodeFirstSync}.
 */
exports.decode = exports.Decoder.decodeFirstSync

/**
 * The codec information for
 * {@link https://github.com/Level/encoding-down encoding-down}, which is a
 * codec framework for leveldb. CBOR is a particularly convenient format for
 * both keys and values, as it can deal with a lot of types that JSON can't
 * handle without losing type information.
 *
 * @example
 * const level = require('level')
 * const cbor = require('cbor')
 *
 * async function putget() {
 *   const db = level('./db', {
 *     keyEncoding: cbor.leveldb,
 *     valueEncoding: cbor.leveldb,
 *   })
 *
 *   await db.put({a: 1}, 9857298342094820394820394820398234092834n)
 *   const val = await db.get({a: 1})
 * }
 */
exports.leveldb = {
  decode: exports.Decoder.decodeFirstSync,
  encode: exports.Encoder.encode,
  buffer: true,
  name: 'cbor',
}

/**
 * Reset everything that we can predict a plugin might have altered in good
 * faith. For now that includes the default set of tags that decoding and
 * encoding will use.
 */
exports.reset = function reset() {
  exports.Encoder.reset()
  exports.Tagged.reset()
}

node_modules/cbor/lib/commented.js (generated, vendored, normal file, +369)

@@ -0,0 +1,369 @@
'use strict'
|
||||
|
||||
const stream = require('stream')
|
||||
const utils = require('./utils')
|
||||
const Decoder = require('./decoder')
|
||||
const NoFilter = require('nofilter')
|
||||
const {MT, NUMBYTES, SYMS} = require('./constants')
|
||||
const {Buffer} = require('buffer')
|
||||
|
||||
function plural(c) {
|
||||
if (c > 1) {
|
||||
return 's'
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef CommentOptions
|
||||
* @property {number} [max_depth=10] How many times to indent
|
||||
* the dashes.
|
||||
* @property {number} [depth=1] Initial indentation depth.
|
||||
* @property {boolean} [no_summary=false] If true, omit the summary
|
||||
* of the full bytes read at the end.
|
||||
* @property {object} [tags] Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
* @property {boolean} [preferWeb=false] If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
* @property {BufferEncoding} [encoding='hex'] Encoding to use for input, if it
|
||||
* is a string.
|
||||
*/
|
||||
/**
|
||||
* @callback commentCallback
|
||||
* @param {Error} [error] If one was generated.
|
||||
* @param {string} [commented] The comment string.
|
||||
* @returns {void}
|
||||
*/
|
||||
/**
|
||||
* Normalize inputs to the static functions.
|
||||
*
|
||||
* @param {CommentOptions|commentCallback|string|number} opts Encoding,
|
||||
* max_depth, or callback.
|
||||
* @param {commentCallback} [cb] Called on completion.
|
||||
* @returns {{options: CommentOptions, cb: commentCallback}} Normalized value.
|
||||
* @throws {TypeError} Unknown option type.
|
||||
* @private
|
||||
*/
|
||||
function normalizeOptions(opts, cb) {
|
||||
switch (typeof opts) {
|
||||
case 'function':
|
||||
return {options: {}, cb: /** @type {commentCallback} */ (opts)}
|
||||
case 'string':
|
||||
return {options: {encoding: /** @type {BufferEncoding} */ (opts)}, cb}
|
||||
case 'number':
|
||||
return {options: {max_depth: opts}, cb}
|
||||
case 'object':
|
||||
return {options: opts || {}, cb}
|
||||
default:
|
||||
throw new TypeError('Unknown option type')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate the expanded format of RFC 8949, section 3.2.2.
|
||||
*
|
||||
* @extends stream.Transform
|
||||
*/
|
||||
class Commented extends stream.Transform {
|
||||
/**
|
||||
* Create a CBOR commenter.
|
||||
*
|
||||
* @param {CommentOptions} [options={}] Stream options.
|
||||
*/
|
||||
constructor(options = {}) {
|
||||
const {
|
||||
depth = 1,
|
||||
max_depth = 10,
|
||||
no_summary = false,
|
||||
// Decoder options
|
||||
tags = {},
|
||||
preferWeb,
|
||||
encoding,
|
||||
// Stream.Transform options
|
||||
...superOpts
|
||||
} = options
|
||||
|
||||
super({
|
||||
...superOpts,
|
||||
readableObjectMode: false,
|
||||
writableObjectMode: false,
|
||||
})
|
||||
|
||||
this.depth = depth
|
||||
this.max_depth = max_depth
|
||||
this.all = new NoFilter()
|
||||
|
||||
if (!tags[24]) {
|
||||
tags[24] = this._tag_24.bind(this)
|
||||
}
|
||||
this.parser = new Decoder({
|
||||
tags,
|
||||
max_depth,
|
||||
preferWeb,
|
||||
encoding,
|
||||
})
|
||||
this.parser.on('value', this._on_value.bind(this))
|
||||
this.parser.on('start', this._on_start.bind(this))
|
||||
this.parser.on('start-string', this._on_start_string.bind(this))
|
||||
this.parser.on('stop', this._on_stop.bind(this))
|
||||
this.parser.on('more-bytes', this._on_more.bind(this))
|
||||
this.parser.on('error', this._on_error.bind(this))
|
||||
if (!no_summary) {
|
||||
this.parser.on('data', this._on_data.bind(this))
|
||||
}
|
||||
this.parser.bs.on('read', this._on_read.bind(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer} v Descend into embedded CBOR.
|
||||
* @private
|
||||
*/
|
||||
_tag_24(v) {
|
||||
const c = new Commented({depth: this.depth + 1, no_summary: true})
|
||||
|
||||
c.on('data', b => this.push(b))
|
||||
c.on('error', er => this.emit('error', er))
|
||||
c.end(v)
|
||||
}
|
||||
|
||||
_transform(fresh, encoding, cb) {
|
||||
this.parser.write(fresh, encoding, cb)
|
||||
}
|
||||
|
||||
_flush(cb) {
|
||||
// TODO: find the test that covers this, and look at the return value
|
||||
return this.parser._flush(cb)
|
||||
}
|
||||
|
||||
/**
|
||||
* Comment on an input Buffer or string, creating a string passed to the
|
||||
* callback. If callback not specified, a promise is returned.
|
||||
*
|
||||
* @static
|
||||
* @param {string|Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray
|
||||
* |DataView|stream.Readable} input Something to parse.
|
||||
* @param {CommentOptions|commentCallback|string|number} [options={}]
|
||||
* Encoding, max_depth, or callback.
|
||||
* @param {commentCallback} [cb] If specified, called on completion.
|
||||
* @returns {Promise} If cb not specified.
|
||||
* @throws {Error} Input required.
|
||||
*/
|
||||
static comment(input, options = {}, cb = null) {
|
||||
if (input == null) {
|
||||
throw new Error('input required')
|
||||
}
|
||||
({options, cb} = normalizeOptions(options, cb))
|
||||
const bs = new NoFilter()
|
||||
const {encoding = 'hex', ...opts} = options
|
||||
const d = new Commented(opts)
|
||||
let p = null
|
||||
|
||||
if (typeof cb === 'function') {
|
||||
d.on('end', () => {
|
||||
cb(null, bs.toString('utf8'))
|
||||
})
|
||||
d.on('error', cb)
|
||||
} else {
|
||||
p = new Promise((resolve, reject) => {
|
||||
d.on('end', () => {
|
||||
resolve(bs.toString('utf8'))
|
||||
})
|
||||
d.on('error', reject)
|
||||
})
|
||||
}
|
||||
d.pipe(bs)
|
||||
utils.guessEncoding(input, encoding).pipe(d)
|
||||
return p
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_error(er) {
|
||||
this.push('ERROR: ')
|
||||
this.push(er.toString())
|
||||
this.push('\n')
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_read(buf) {
|
||||
this.all.write(buf)
|
||||
const hex = buf.toString('hex')
|
||||
|
||||
this.push(new Array(this.depth + 1).join(' '))
|
||||
this.push(hex)
|
||||
|
||||
let ind = ((this.max_depth - this.depth) * 2) - hex.length
|
||||
if (ind < 1) {
|
||||
ind = 1
|
||||
}
|
||||
this.push(new Array(ind + 1).join(' '))
|
||||
this.push('-- ')
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_more(mt, len, parent_mt, pos) {
|
||||
let desc = ''
|
||||
|
||||
this.depth++
|
||||
switch (mt) {
|
||||
case MT.POS_INT:
|
||||
desc = 'Positive number,'
|
||||
break
|
||||
case MT.NEG_INT:
|
||||
desc = 'Negative number,'
|
||||
break
|
||||
case MT.ARRAY:
|
||||
desc = 'Array, length'
|
||||
break
|
||||
case MT.MAP:
|
||||
desc = 'Map, count'
|
||||
break
|
||||
case MT.BYTE_STRING:
|
||||
desc = 'Bytes, length'
|
||||
break
|
||||
case MT.UTF8_STRING:
|
||||
desc = 'String, length'
|
||||
break
|
||||
case MT.SIMPLE_FLOAT:
|
||||
if (len === 1) {
|
||||
desc = 'Simple value,'
|
||||
} else {
|
||||
desc = 'Float,'
|
||||
}
|
||||
break
|
||||
}
|
||||
this.push(`${desc} next ${len} byte${plural(len)}\n`)
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_start_string(mt, len, parent_mt, pos) {
|
||||
let desc = ''
|
||||
|
||||
this.depth++
|
||||
switch (mt) {
|
||||
case MT.BYTE_STRING:
|
||||
desc = `Bytes, length: ${len}`
|
||||
break
|
||||
case MT.UTF8_STRING:
|
||||
desc = `String, length: ${len.toString()}`
|
||||
break
|
||||
}
|
||||
this.push(`${desc}\n`)
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_start(mt, tag, parent_mt, pos) {
|
||||
this.depth++
|
||||
switch (parent_mt) {
|
||||
case MT.ARRAY:
|
||||
this.push(`[${pos}], `)
|
||||
break
|
||||
case MT.MAP:
|
||||
if (pos % 2) {
|
||||
this.push(`{Val:${Math.floor(pos / 2)}}, `)
|
||||
} else {
|
||||
this.push(`{Key:${Math.floor(pos / 2)}}, `)
|
||||
}
|
||||
break
|
||||
}
|
||||
switch (mt) {
|
||||
case MT.TAG:
|
||||
this.push(`Tag #${tag}`)
|
||||
if (tag === 24) {
|
||||
this.push(' Encoded CBOR data item')
|
||||
}
|
||||
break
|
||||
case MT.ARRAY:
|
||||
if (tag === SYMS.STREAM) {
|
||||
this.push('Array (streaming)')
|
||||
} else {
|
||||
this.push(`Array, ${tag} item${plural(tag)}`)
|
||||
}
|
||||
break
|
||||
case MT.MAP:
|
||||
if (tag === SYMS.STREAM) {
|
||||
this.push('Map (streaming)')
|
||||
} else {
|
||||
this.push(`Map, ${tag} pair${plural(tag)}`)
|
||||
}
|
||||
break
|
||||
case MT.BYTE_STRING:
|
||||
this.push('Bytes (streaming)')
|
||||
break
|
||||
case MT.UTF8_STRING:
|
||||
this.push('String (streaming)')
|
||||
break
|
||||
}
|
||||
this.push('\n')
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_stop(mt) {
|
||||
this.depth--
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
_on_value(val, parent_mt, pos, ai) {
|
||||
if (val !== SYMS.BREAK) {
|
||||
switch (parent_mt) {
|
||||
case MT.ARRAY:
|
||||
this.push(`[${pos}], `)
|
||||
break
|
||||
case MT.MAP:
|
||||
if (pos % 2) {
|
||||
this.push(`{Val:${Math.floor(pos / 2)}}, `)
|
||||
} else {
|
||||
this.push(`{Key:${Math.floor(pos / 2)}}, `)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
const str = utils.cborValueToString(val, -Infinity)
|
||||
|
||||
if ((typeof val === 'string') ||
|
||||
(Buffer.isBuffer(val))) {
|
||||
if (val.length > 0) {
|
||||
this.push(str)
|
||||
this.push('\n')
|
||||
}
|
||||
this.depth--
|
||||
} else {
|
||||
this.push(str)
|
||||
this.push('\n')
|
||||
}
|
||||
|
||||
switch (ai) {
|
||||
case NUMBYTES.ONE:
|
||||
case NUMBYTES.TWO:
|
||||
case NUMBYTES.FOUR:
|
||||
case NUMBYTES.EIGHT:
|
||||
this.depth--
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_data() {
|
||||
this.push('0x')
|
||||
this.push(this.all.read().toString('hex'))
|
||||
this.push('\n')
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Commented
|
||||

node_modules/cbor/lib/constants.js (generated, vendored, normal file, +79)

@@ -0,0 +1,79 @@
'use strict'

/**
 * @enum {number}
 */
exports.MT = {
  POS_INT: 0,
  NEG_INT: 1,
  BYTE_STRING: 2,
  UTF8_STRING: 3,
  ARRAY: 4,
  MAP: 5,
  TAG: 6,
  SIMPLE_FLOAT: 7,
}

/**
 * @enum {number}
 */
exports.TAG = {
  DATE_STRING: 0,
  DATE_EPOCH: 1,
  POS_BIGINT: 2,
  NEG_BIGINT: 3,
  DECIMAL_FRAC: 4,
  BIGFLOAT: 5,
  BASE64URL_EXPECTED: 21,
  BASE64_EXPECTED: 22,
  BASE16_EXPECTED: 23,
  CBOR: 24,
  URI: 32,
  BASE64URL: 33,
  BASE64: 34,
  REGEXP: 35,
  MIME: 36,
  // https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md
  SET: 258,
}

/**
 * @enum {number}
 */
exports.NUMBYTES = {
  ZERO: 0,
  ONE: 24,
  TWO: 25,
  FOUR: 26,
  EIGHT: 27,
  INDEFINITE: 31,
}

/**
 * @enum {number}
 */
exports.SIMPLE = {
  FALSE: 20,
  TRUE: 21,
  NULL: 22,
  UNDEFINED: 23,
}

exports.SYMS = {
  NULL: Symbol.for('github.com/hildjj/node-cbor/null'),
  UNDEFINED: Symbol.for('github.com/hildjj/node-cbor/undef'),
  PARENT: Symbol.for('github.com/hildjj/node-cbor/parent'),
  BREAK: Symbol.for('github.com/hildjj/node-cbor/break'),
  STREAM: Symbol.for('github.com/hildjj/node-cbor/stream'),
}

exports.SHIFT32 = 0x100000000

exports.BI = {
  MINUS_ONE: BigInt(-1),
  NEG_MAX: BigInt(-1) - BigInt(Number.MAX_SAFE_INTEGER),
  MAXINT32: BigInt('0xffffffff'),
  MAXINT64: BigInt('0xffffffffffffffff'),
  SHIFT32: BigInt(exports.SHIFT32),
}

node_modules/cbor/lib/decoder.js (generated, vendored, normal file, +670)

@@ -0,0 +1,670 @@
'use strict'
|
||||
|
||||
const BinaryParseStream = require('../vendor/binary-parse-stream')
|
||||
const Tagged = require('./tagged')
|
||||
const Simple = require('./simple')
|
||||
const utils = require('./utils')
|
||||
const NoFilter = require('nofilter')
|
||||
const stream = require('stream')
|
||||
const constants = require('./constants')
|
||||
const {MT, NUMBYTES, SYMS, BI} = constants
|
||||
const {Buffer} = require('buffer')
|
||||
|
||||
const COUNT = Symbol('count')
|
||||
const MAJOR = Symbol('major type')
|
||||
const ERROR = Symbol('error')
|
||||
const NOT_FOUND = Symbol('not found')
|
||||
|
||||
function parentArray(parent, typ, count) {
|
||||
const a = []
|
||||
|
||||
a[COUNT] = count
|
||||
a[SYMS.PARENT] = parent
|
||||
a[MAJOR] = typ
|
||||
return a
|
||||
}
|
||||
|
||||
function parentBufferStream(parent, typ) {
|
||||
const b = new NoFilter()
|
||||
|
||||
b[COUNT] = -1
|
||||
b[SYMS.PARENT] = parent
|
||||
b[MAJOR] = typ
|
||||
return b
|
||||
}
|
||||
|
||||
class UnexpectedDataError extends Error {
|
||||
constructor(byte, value) {
|
||||
super(`Unexpected data: 0x${byte.toString(16)}`)
|
||||
this.name = 'UnexpectedDataError'
|
||||
this.byte = byte
|
||||
this.value = value
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Things that can act as inputs, from which a NoFilter can be created.
|
||||
*
|
||||
* @typedef {string|Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray
|
||||
* |DataView|stream.Readable} BufferLike
|
||||
*/
|
||||
/**
|
||||
* @typedef ExtendedResults
|
||||
* @property {any} value The value that was found.
|
||||
* @property {number} length The number of bytes of the original input that
|
||||
* were read.
|
||||
* @property {Buffer} bytes The bytes of the original input that were used
|
||||
* to produce the value.
|
||||
* @property {Buffer} [unused] The bytes that were left over from the original
|
||||
* input. This property only exists if {@linkcode Decoder.decodeFirst} or
|
||||
* {@linkcode Decoder.decodeFirstSync} was called.
|
||||
*/
|
||||
/**
|
||||
* @typedef DecoderOptions
|
||||
* @property {number} [max_depth=-1] The maximum depth to parse.
|
||||
* Use -1 for "until you run out of memory". Set this to a finite
|
||||
* positive number for un-trusted inputs. Most standard inputs won't nest
|
||||
* more than 100 or so levels; I've tested into the millions before
|
||||
* running out of memory.
|
||||
* @property {Tagged.TagMap} [tags] Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
* @property {boolean} [preferWeb=false] If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
* @property {BufferEncoding} [encoding='hex'] The encoding of the input.
|
||||
* Ignored if input is a Buffer.
|
||||
* @property {boolean} [required=false] Should an error be thrown when no
|
||||
* data is in the input?
|
||||
* @property {boolean} [extendedResults=false] If true, emit extended
|
||||
* results, which will be an object with shape {@link ExtendedResults}.
|
||||
* The value will already have been null-checked.
|
||||
* @property {boolean} [preventDuplicateKeys=false] If true, error is
|
||||
* thrown if a map has duplicate keys.
|
||||
*/
|
||||
/**
|
||||
* @callback decodeCallback
|
||||
* @param {Error} [error] If one was generated.
|
||||
* @param {any} [value] The decoded value.
|
||||
* @returns {void}
|
||||
*/
|
||||
/**
|
||||
* @param {DecoderOptions|decodeCallback|string} opts Options,
|
||||
* the callback, or input encoding.
|
||||
* @param {decodeCallback} [cb] Called on completion.
|
||||
* @returns {{options: DecoderOptions, cb: decodeCallback}} Normalized.
|
||||
* @throws {TypeError} On unknown option type.
|
||||
* @private
|
||||
*/
|
||||
function normalizeOptions(opts, cb) {
|
||||
switch (typeof opts) {
|
||||
case 'function':
|
||||
return {options: {}, cb: /** @type {decodeCallback} */ (opts)}
|
||||
case 'string':
|
||||
return {options: {encoding: /** @type {BufferEncoding} */ (opts)}, cb}
|
||||
case 'object':
|
||||
return {options: opts || {}, cb}
|
||||
default:
|
||||
throw new TypeError('Unknown option type')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode a stream of CBOR bytes by transforming them into equivalent
|
||||
* JavaScript data. Because of the limitations of Node object streams,
|
||||
* special symbols are emitted instead of NULL or UNDEFINED. Fix those
|
||||
* up by calling {@link Decoder.nullcheck}.
|
||||
*
|
||||
* @extends BinaryParseStream
|
||||
*/
|
||||
class Decoder extends BinaryParseStream {
|
||||
/**
|
||||
* Create a parsing stream.
|
||||
*
|
||||
* @param {DecoderOptions} [options={}] Options.
|
||||
*/
|
||||
constructor(options = {}) {
|
||||
const {
|
||||
tags = {},
|
||||
max_depth = -1,
|
||||
preferWeb = false,
|
||||
required = false,
|
||||
encoding = 'hex',
|
||||
extendedResults = false,
|
||||
preventDuplicateKeys = false,
|
||||
...superOpts
|
||||
} = options
|
||||
|
||||
super({defaultEncoding: encoding, ...superOpts})
|
||||
|
||||
this.running = true
|
||||
this.max_depth = max_depth
|
||||
this.tags = tags
|
||||
this.preferWeb = preferWeb
|
||||
this.extendedResults = extendedResults
|
||||
this.required = required
|
||||
this.preventDuplicateKeys = preventDuplicateKeys
|
||||
|
||||
if (extendedResults) {
|
||||
this.bs.on('read', this._onRead.bind(this))
|
||||
this.valueBytes = /** @type {NoFilter} */ (new NoFilter())
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check the given value for a symbol encoding a NULL or UNDEFINED value in
|
||||
* the CBOR stream.
|
||||
*
|
||||
* @static
|
||||
* @param {any} val The value to check.
|
||||
* @returns {any} The corrected value.
|
||||
* @throws {Error} Nothing was found.
|
||||
* @example
|
||||
* myDecoder.on('data', val => {
|
||||
* val = Decoder.nullcheck(val)
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
static nullcheck(val) {
|
||||
switch (val) {
|
||||
case SYMS.NULL:
|
||||
return null
|
||||
case SYMS.UNDEFINED:
|
||||
return undefined
|
||||
// Leaving this in for now as belt-and-suspenders, but I'm pretty sure
|
||||
// it can't happen.
|
||||
/* istanbul ignore next */
|
||||
case NOT_FOUND:
|
||||
/* istanbul ignore next */
|
||||
throw new Error('Value not found')
|
||||
default:
|
||||
return val
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode the first CBOR item in the input, synchronously. This will throw
|
||||
* an exception if the input is not valid CBOR, or if there are more bytes
|
||||
* left over at the end (if options.extendedResults is not true).
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input If a Readable stream, must have
|
||||
* received the `readable` event already, or you will get an error
|
||||
* claiming "Insufficient data".
|
||||
* @param {DecoderOptions|string} [options={}] Options or encoding for input.
|
||||
* @returns {ExtendedResults|any} The decoded value.
|
||||
* @throws {UnexpectedDataError} Data is left over after decoding.
|
||||
* @throws {Error} Insufficient data.
|
||||
*/
|
||||
static decodeFirstSync(input, options = {}) {
|
||||
if (input == null) {
|
||||
throw new TypeError('input required')
|
||||
}
|
||||
({options} = normalizeOptions(options))
|
||||
const {encoding = 'hex', ...opts} = options
|
||||
const c = new Decoder(opts)
|
||||
const s = utils.guessEncoding(input, encoding)
|
||||
|
||||
// For/of doesn't work when you need to call next() with a value
|
||||
// generator created by parser will be "done" after each CBOR entity
|
||||
// parser will yield numbers of bytes that it wants
|
||||
const parser = c._parse()
|
||||
let state = parser.next()
|
||||
|
||||
while (!state.done) {
|
||||
const b = s.read(state.value)
|
||||
|
||||
if ((b == null) || (b.length !== state.value)) {
|
||||
throw new Error('Insufficient data')
|
||||
}
|
||||
if (c.extendedResults) {
|
||||
c.valueBytes.write(b)
|
||||
}
|
||||
state = parser.next(b)
|
||||
}
|
||||
|
||||
let val = null
|
||||
if (c.extendedResults) {
|
||||
val = state.value
|
||||
val.unused = s.read()
|
||||
} else {
|
||||
val = Decoder.nullcheck(state.value)
|
||||
if (s.length > 0) {
|
||||
const nextByte = s.read(1)
|
||||
|
||||
s.unshift(nextByte)
|
||||
throw new UnexpectedDataError(nextByte[0], val)
|
||||
}
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode all of the CBOR items in the input into an array. This will throw
|
||||
* an exception if the input is not valid CBOR; a zero-length input will
|
||||
* return an empty array.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|string} [options={}] Options or encoding
|
||||
* for input.
|
||||
* @returns {Array<ExtendedResults>|Array<any>} Array of all found items.
|
||||
* @throws {TypeError} No input provided.
|
||||
* @throws {Error} Insufficient data provided.
|
||||
*/
|
||||
static decodeAllSync(input, options = {}) {
|
||||
if (input == null) {
|
||||
throw new TypeError('input required')
|
||||
}
|
||||
({options} = normalizeOptions(options))
|
||||
const {encoding = 'hex', ...opts} = options
|
||||
const c = new Decoder(opts)
|
||||
const s = utils.guessEncoding(input, encoding)
|
||||
const res = []
|
||||
|
||||
while (s.length > 0) {
|
||||
const parser = c._parse()
|
||||
let state = parser.next()
|
||||
|
||||
while (!state.done) {
|
||||
const b = s.read(state.value)
|
||||
|
||||
if ((b == null) || (b.length !== state.value)) {
|
||||
throw new Error('Insufficient data')
|
||||
}
|
||||
if (c.extendedResults) {
|
||||
c.valueBytes.write(b)
|
||||
}
|
||||
state = parser.next(b)
|
||||
}
|
||||
res.push(Decoder.nullcheck(state.value))
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode the first CBOR item in the input. This will error if there are
|
||||
* more bytes left over at the end (if options.extendedResults is not true),
|
||||
* and optionally if there were no valid CBOR bytes in the input. Emits the
|
||||
* {Decoder.NOT_FOUND} Symbol in the callback if no data was found and the
|
||||
* `required` option is false.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|decodeCallback|string} [options={}] Options, the
|
||||
* callback, or input encoding.
|
||||
* @param {decodeCallback} [cb] Callback.
|
||||
* @returns {Promise<ExtendedResults|any>} Returned even if callback is
|
||||
* specified.
|
||||
* @throws {TypeError} No input provided.
|
||||
*/
|
||||
static decodeFirst(input, options = {}, cb = null) {
|
||||
if (input == null) {
|
||||
throw new TypeError('input required')
|
||||
}
|
||||
({options, cb} = normalizeOptions(options, cb))
|
||||
const {encoding = 'hex', required = false, ...opts} = options
|
||||
|
||||
const c = new Decoder(opts)
|
||||
let v = /** @type {any} */ (NOT_FOUND)
|
||||
const s = utils.guessEncoding(input, encoding)
|
||||
const p = new Promise((resolve, reject) => {
|
||||
c.on('data', val => {
|
||||
v = Decoder.nullcheck(val)
|
||||
c.close()
|
||||
})
|
||||
c.once('error', er => {
|
||||
if (c.extendedResults && (er instanceof UnexpectedDataError)) {
|
||||
v.unused = c.bs.slice()
|
||||
return resolve(v)
|
||||
}
|
||||
if (v !== NOT_FOUND) {
|
||||
// Typescript work-around
|
||||
// eslint-disable-next-line dot-notation
|
||||
er['value'] = v
|
||||
}
|
||||
v = ERROR
|
||||
c.close()
|
||||
return reject(er)
|
||||
})
|
||||
c.once('end', () => {
|
||||
switch (v) {
|
||||
case NOT_FOUND:
|
||||
if (required) {
|
||||
return reject(new Error('No CBOR found'))
|
||||
}
|
||||
return resolve(v)
|
||||
// Pretty sure this can't happen, but not *certain*.
|
||||
/* istanbul ignore next */
|
||||
case ERROR:
|
||||
/* istanbul ignore next */
|
||||
return undefined
|
||||
default:
|
||||
return resolve(v)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if (typeof cb === 'function') {
|
||||
p.then(val => cb(null, val), cb)
|
||||
}
|
||||
s.pipe(c)
|
||||
return p
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback decodeAllCallback
|
||||
* @param {Error} error If one was generated.
|
||||
* @param {Array<ExtendedResults>|Array<any>} value All of the decoded
|
||||
* values, wrapped in an Array.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Decode all of the CBOR items in the input. This will error if there are
|
||||
* more bytes left over at the end.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|decodeAllCallback|string} [options={}]
|
||||
* Decoding options, the callback, or the input encoding.
|
||||
* @param {decodeAllCallback} [cb] Callback.
|
||||
* @returns {Promise<Array<ExtendedResults>|Array<any>>} Even if callback
|
||||
* is specified.
|
||||
* @throws {TypeError} No input specified.
|
||||
*/
|
||||
static decodeAll(input, options = {}, cb = null) {
|
||||
if (input == null) {
|
||||
throw new TypeError('input required')
|
||||
}
|
||||
({options, cb} = normalizeOptions(options, cb))
|
||||
const {encoding = 'hex', ...opts} = options
|
||||
|
||||
const c = new Decoder(opts)
|
||||
const vals = []
|
||||
|
||||
c.on('data', val => vals.push(Decoder.nullcheck(val)))
|
||||
|
||||
const p = new Promise((resolve, reject) => {
|
||||
c.on('error', reject)
|
||||
c.on('end', () => resolve(vals))
|
||||
})
|
||||
|
||||
if (typeof cb === 'function') {
|
||||
p.then(v => cb(undefined, v), er => cb(er, undefined))
|
||||
}
|
||||
utils.guessEncoding(input, encoding).pipe(c)
|
||||
return p
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop processing.
|
||||
*/
|
||||
close() {
|
||||
this.running = false
|
||||
this.__fresh = true
|
||||
}
|
||||
|
||||
/**
|
||||
* Only called if extendedResults is true.
|
||||
*
|
||||
* @ignore
|
||||
*/
|
||||
_onRead(data) {
|
||||
this.valueBytes.write(data)
|
||||
}
|
||||
|
||||
/**
|
||||
* @yields {number} Number of bytes to read.
|
||||
* @returns {Generator<number, any, Buffer>} Yields a number of bytes,
|
||||
* returns anything, next returns a Buffer.
|
||||
* @throws {Error} Maximum depth exceeded.
|
||||
* @ignore
|
||||
*/
|
||||
*_parse() {
|
||||
let parent = null
|
||||
let depth = 0
|
||||
let val = null
|
||||
|
||||
while (true) {
|
||||
if ((this.max_depth >= 0) && (depth > this.max_depth)) {
|
||||
throw new Error(`Maximum depth ${this.max_depth} exceeded`)
|
||||
}
|
||||
|
||||
const [octet] = yield 1
|
||||
if (!this.running) {
|
||||
this.bs.unshift(Buffer.from([octet]))
|
||||
throw new UnexpectedDataError(octet)
|
||||
}
|
||||
const mt = octet >> 5
|
||||
const ai = octet & 0x1f
|
||||
const parent_major = (parent == null) ? undefined : parent[MAJOR]
|
||||
const parent_length = (parent == null) ? undefined : parent.length
|
||||
|
||||
switch (ai) {
|
||||
case NUMBYTES.ONE:
|
||||
this.emit('more-bytes', mt, 1, parent_major, parent_length)
|
||||
;[val] = yield 1
|
||||
break
|
||||
case NUMBYTES.TWO:
|
||||
case NUMBYTES.FOUR:
|
||||
case NUMBYTES.EIGHT: {
|
||||
const numbytes = 1 << (ai - 24)
|
||||
|
||||
this.emit('more-bytes', mt, numbytes, parent_major, parent_length)
|
||||
const buf = yield numbytes
|
||||
val = (mt === MT.SIMPLE_FLOAT) ?
|
||||
buf :
|
||||
utils.parseCBORint(ai, buf)
|
||||
break
|
||||
}
|
||||
case 28:
|
||||
case 29:
|
||||
case 30:
|
||||
this.running = false
|
||||
throw new Error(`Additional info not implemented: ${ai}`)
|
||||
case NUMBYTES.INDEFINITE:
|
||||
switch (mt) {
|
||||
case MT.POS_INT:
|
||||
case MT.NEG_INT:
|
||||
case MT.TAG:
|
||||
throw new Error(`Invalid indefinite encoding for MT ${mt}`)
|
||||
}
|
||||
val = -1
|
||||
break
|
||||
default:
|
||||
val = ai
|
||||
}
|
||||
switch (mt) {
|
||||
case MT.POS_INT:
|
||||
// Val already decoded
|
||||
break
|
||||
case MT.NEG_INT:
|
||||
if (val === Number.MAX_SAFE_INTEGER) {
|
||||
val = BI.NEG_MAX
|
||||
} else {
|
||||
val = (typeof val === 'bigint') ? BI.MINUS_ONE - val : -1 - val
|
||||
}
|
||||
break
|
||||
case MT.BYTE_STRING:
|
||||
case MT.UTF8_STRING:
|
||||
switch (val) {
|
||||
case 0:
|
||||
this.emit('start-string', mt, val, parent_major, parent_length)
|
||||
if (mt === MT.UTF8_STRING) {
|
||||
val = ''
|
||||
} else {
|
||||
val = this.preferWeb ? new Uint8Array(0) : Buffer.allocUnsafe(0)
|
||||
}
|
||||
break
|
||||
case -1:
|
||||
this.emit('start', mt, SYMS.STREAM, parent_major, parent_length)
|
||||
parent = parentBufferStream(parent, mt)
|
||||
depth++
|
||||
continue
|
||||
default:
|
||||
this.emit('start-string', mt, val, parent_major, parent_length)
|
||||
val = yield val
|
||||
if (mt === MT.UTF8_STRING) {
|
||||
val = utils.utf8(val)
|
||||
} else if (this.preferWeb) {
|
||||
val = new Uint8Array(val.buffer, val.byteOffset, val.length)
|
||||
}
|
||||
}
|
||||
break
|
||||
case MT.ARRAY:
|
||||
case MT.MAP:
|
||||
switch (val) {
|
||||
case 0:
|
||||
val = (mt === MT.MAP) ? {} : []
|
||||
break
|
||||
case -1:
|
||||
this.emit('start', mt, SYMS.STREAM, parent_major, parent_length)
|
||||
parent = parentArray(parent, mt, -1)
|
||||
depth++
|
||||
continue
|
||||
default:
|
||||
this.emit('start', mt, val, parent_major, parent_length)
|
||||
parent = parentArray(parent, mt, val * (mt - 3))
|
||||
depth++
|
||||
continue
|
||||
}
|
||||
break
|
||||
case MT.TAG:
|
||||
this.emit('start', mt, val, parent_major, parent_length)
|
||||
parent = parentArray(parent, mt, 1)
|
||||
parent.push(val)
|
||||
depth++
|
||||
continue
|
||||
case MT.SIMPLE_FLOAT:
|
||||
if (typeof val === 'number') {
|
||||
if ((ai === NUMBYTES.ONE) && (val < 32)) {
|
||||
throw new Error(
|
||||
`Invalid two-byte encoding of simple value ${val}`
|
||||
)
|
||||
}
|
||||
const hasParent = (parent != null)
|
||||
val = Simple.decode(
|
||||
val,
|
||||
hasParent,
|
||||
hasParent && (parent[COUNT] < 0)
|
||||
)
|
||||
} else {
|
||||
val = utils.parseCBORfloat(val)
|
||||
}
|
||||
}
|
||||
this.emit('value', val, parent_major, parent_length, ai)
|
||||
let again = false
|
||||
while (parent != null) {
|
||||
if (val === SYMS.BREAK) {
|
||||
parent[COUNT] = 1
|
||||
} else if (Array.isArray(parent)) {
|
||||
parent.push(val)
|
||||
} else {
|
||||
// Assert: parent instanceof NoFilter
|
||||
const pm = parent[MAJOR]
|
||||
|
||||
if ((pm != null) && (pm !== mt)) {
|
||||
this.running = false
|
||||
throw new Error('Invalid major type in indefinite encoding')
|
||||
}
|
||||
parent.write(val)
|
||||
}
|
||||
|
||||
if ((--parent[COUNT]) !== 0) {
|
||||
again = true
|
||||
break
|
||||
}
|
||||
--depth
|
||||
delete parent[COUNT]
|
||||
|
||||
if (Array.isArray(parent)) {
|
||||
switch (parent[MAJOR]) {
|
||||
case MT.ARRAY:
|
||||
val = parent
|
||||
break
|
||||
case MT.MAP: {
|
||||
let allstrings = true
|
||||
|
||||
if ((parent.length % 2) !== 0) {
|
||||
throw new Error(`Invalid map length: ${parent.length}`)
|
||||
}
|
||||
for (let i = 0, len = parent.length; i < len; i += 2) {
|
||||
if ((typeof parent[i] !== 'string') ||
|
||||
(parent[i] === '__proto__')) {
|
||||
allstrings = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if (allstrings) {
|
||||
val = {}
|
||||
for (let i = 0, len = parent.length; i < len; i += 2) {
|
||||
if (this.preventDuplicateKeys &&
|
||||
Object.prototype.hasOwnProperty.call(val, parent[i])) {
|
||||
throw new Error('Duplicate keys in a map')
|
||||
}
|
||||
val[parent[i]] = parent[i + 1]
|
||||
}
|
||||
} else {
|
||||
val = new Map()
|
||||
for (let i = 0, len = parent.length; i < len; i += 2) {
|
||||
if (this.preventDuplicateKeys && val.has(parent[i])) {
|
||||
throw new Error('Duplicate keys in a map')
|
||||
}
|
||||
val.set(parent[i], parent[i + 1])
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
case MT.TAG: {
|
||||
const t = new Tagged(parent[0], parent[1])
|
||||
|
||||
val = t.convert(this.tags)
|
||||
break
|
||||
}
|
||||
}
|
||||
} else /* istanbul ignore else */ if (parent instanceof NoFilter) {
|
||||
// Only parent types are Array and NoFilter for (Array/Map) and
|
||||
// (bytes/string) respectively.
|
||||
switch (parent[MAJOR]) {
|
||||
case MT.BYTE_STRING:
|
||||
val = parent.slice()
|
||||
if (this.preferWeb) {
|
||||
val = new Uint8Array(
|
||||
/** @type {Buffer} */ (val).buffer,
|
||||
/** @type {Buffer} */ (val).byteOffset,
|
||||
/** @type {Buffer} */ (val).length
|
||||
)
|
||||
}
|
||||
break
|
||||
case MT.UTF8_STRING:
|
||||
val = parent.toString('utf-8')
|
||||
break
|
||||
}
|
||||
}
|
||||
this.emit('stop', parent[MAJOR])
|
||||
|
||||
const old = parent
|
||||
parent = parent[SYMS.PARENT]
|
||||
delete old[SYMS.PARENT]
|
||||
delete old[MAJOR]
|
||||
}
|
||||
if (!again) {
|
||||
if (this.extendedResults) {
|
||||
const bytes = this.valueBytes.slice()
|
||||
const ret = {
|
||||
value: Decoder.nullcheck(val),
|
||||
bytes,
|
||||
length: bytes.length,
|
||||
}
|
||||
|
||||
this.valueBytes = new NoFilter()
|
||||
return ret
|
||||
}
|
||||
return val
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Decoder.NOT_FOUND = NOT_FOUND
|
||||
module.exports = Decoder
|
||||

node_modules/cbor/lib/diagnose.js (generated, vendored, normal file, +260)

@@ -0,0 +1,260 @@
'use strict'
|
||||
|
||||
const stream = require('stream')
|
||||
const Decoder = require('./decoder')
|
||||
const utils = require('./utils')
|
||||
const NoFilter = require('nofilter')
|
||||
const {MT, SYMS} = require('./constants')
|
||||
|
||||
/**
|
||||
* Things that can act as inputs, from which a NoFilter can be created.
|
||||
*
|
||||
* @typedef {string|Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray
|
||||
* |DataView|stream.Readable} BufferLike
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef DiagnoseOptions
|
||||
* @property {string} [separator='\n'] Output between detected objects.
|
||||
* @property {boolean} [stream_errors=false] Put error info into the
|
||||
* output stream.
|
||||
* @property {number} [max_depth=-1] The maximum depth to parse.
|
||||
* Use -1 for "until you run out of memory". Set this to a finite
|
||||
* positive number for un-trusted inputs. Most standard inputs won't nest
|
||||
* more than 100 or so levels; I've tested into the millions before
|
||||
* running out of memory.
|
||||
* @property {object} [tags] Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
* @property {boolean} [preferWeb=false] If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
* @property {BufferEncoding} [encoding='hex'] The encoding of input, ignored if
|
||||
* input is not string.
|
||||
*/
|
||||
/**
|
||||
* @callback diagnoseCallback
|
||||
* @param {Error} [error] If one was generated.
|
||||
* @param {string} [value] The diagnostic value.
|
||||
* @returns {void}
|
||||
*/
|
||||
/**
|
||||
* @param {DiagnoseOptions|diagnoseCallback|string} opts Options,
|
||||
* the callback, or input encoding.
|
||||
* @param {diagnoseCallback} [cb] Called on completion.
|
||||
* @returns {{options: DiagnoseOptions, cb: diagnoseCallback}} Normalized.
|
||||
* @throws {TypeError} Unknown option type.
|
||||
* @private
|
||||
*/
|
||||
function normalizeOptions(opts, cb) {
|
||||
switch (typeof opts) {
|
||||
case 'function':
|
||||
return {options: {}, cb: /** @type {diagnoseCallback} */ (opts)}
|
||||
case 'string':
|
||||
return {options: {encoding: /** @type {BufferEncoding} */ (opts)}, cb}
|
||||
case 'object':
|
||||
return {options: opts || {}, cb}
|
||||
default:
|
||||
throw new TypeError('Unknown option type')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Output the diagnostic format from a stream of CBOR bytes.
|
||||
*
|
||||
* @extends stream.Transform
|
||||
*/
|
||||
class Diagnose extends stream.Transform {
|
||||
/**
|
||||
* Creates an instance of Diagnose.
|
||||
*
|
||||
* @param {DiagnoseOptions} [options={}] Options for creation.
|
||||
*/
|
||||
constructor(options = {}) {
|
||||
const {
|
||||
separator = '\n',
|
||||
stream_errors = false,
|
||||
// Decoder options
|
||||
tags,
|
||||
max_depth,
|
||||
preferWeb,
|
||||
encoding,
|
||||
// Stream.Transform options
|
||||
...superOpts
|
||||
} = options
|
||||
super({
|
||||
...superOpts,
|
||||
readableObjectMode: false,
|
||||
writableObjectMode: false,
|
||||
})
|
||||
|
||||
this.float_bytes = -1
|
||||
this.separator = separator
|
||||
this.stream_errors = stream_errors
|
||||
this.parser = new Decoder({
|
||||
tags,
|
||||
max_depth,
|
||||
preferWeb,
|
||||
encoding,
|
||||
})
|
||||
this.parser.on('more-bytes', this._on_more.bind(this))
|
||||
this.parser.on('value', this._on_value.bind(this))
|
||||
this.parser.on('start', this._on_start.bind(this))
|
||||
this.parser.on('stop', this._on_stop.bind(this))
|
||||
this.parser.on('data', this._on_data.bind(this))
|
||||
this.parser.on('error', this._on_error.bind(this))
|
||||
}
|
||||
|
||||
_transform(fresh, encoding, cb) {
|
||||
return this.parser.write(fresh, encoding, cb)
|
||||
}
|
||||
|
||||
_flush(cb) {
|
||||
return this.parser._flush(er => {
|
||||
if (this.stream_errors) {
|
||||
if (er) {
|
||||
this._on_error(er)
|
||||
}
|
||||
return cb()
|
||||
}
|
||||
return cb(er)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience function to return a string in diagnostic format.
|
||||
*
|
||||
* @param {BufferLike} input The CBOR bytes to format.
|
||||
* @param {DiagnoseOptions |diagnoseCallback|string} [options={}]
|
||||
* Options, the callback, or the input encoding.
|
||||
* @param {diagnoseCallback} [cb] Callback.
|
||||
* @throws {TypeError} Input not provided.
|
||||
* @returns {Promise} If callback not specified.
|
||||
*/
|
||||
static diagnose(input, options = {}, cb = null) {
|
||||
if (input == null) {
|
||||
throw new TypeError('input required')
|
||||
}
|
||||
({options, cb} = normalizeOptions(options, cb))
|
||||
const {encoding = 'hex', ...opts} = options
|
||||
|
||||
const bs = new NoFilter()
|
||||
const d = new Diagnose(opts)
|
||||
let p = null
|
||||
if (typeof cb === 'function') {
|
||||
d.on('end', () => cb(null, bs.toString('utf8')))
|
||||
d.on('error', cb)
|
||||
} else {
|
||||
p = new Promise((resolve, reject) => {
|
||||
d.on('end', () => resolve(bs.toString('utf8')))
|
||||
d.on('error', reject)
|
||||
})
|
||||
}
|
||||
d.pipe(bs)
|
||||
utils.guessEncoding(input, encoding).pipe(d)
|
||||
return p
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_error(er) {
|
||||
if (this.stream_errors) {
|
||||
this.push(er.toString())
|
||||
} else {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_on_more(mt, len, parent_mt, pos) {
|
||||
if (mt === MT.SIMPLE_FLOAT) {
|
||||
this.float_bytes = {
|
||||
2: 1,
|
||||
4: 2,
|
||||
8: 3,
|
||||
}[len]
|
||||
}
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_fore(parent_mt, pos) {
|
||||
switch (parent_mt) {
|
||||
case MT.BYTE_STRING:
|
||||
case MT.UTF8_STRING:
|
||||
case MT.ARRAY:
|
||||
if (pos > 0) {
|
||||
this.push(', ')
|
||||
}
|
||||
break
|
||||
case MT.MAP:
|
||||
if (pos > 0) {
|
||||
if (pos % 2) {
|
||||
this.push(': ')
|
||||
} else {
|
||||
this.push(', ')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_on_value(val, parent_mt, pos) {
|
||||
if (val === SYMS.BREAK) {
|
||||
return
|
||||
}
|
||||
this._fore(parent_mt, pos)
|
||||
const fb = this.float_bytes
|
||||
this.float_bytes = -1
|
||||
this.push(utils.cborValueToString(val, fb))
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_on_start(mt, tag, parent_mt, pos) {
|
||||
this._fore(parent_mt, pos)
|
||||
switch (mt) {
|
||||
case MT.TAG:
|
||||
this.push(`${tag}(`)
|
||||
break
|
||||
case MT.ARRAY:
|
||||
this.push('[')
|
||||
break
|
||||
case MT.MAP:
|
||||
this.push('{')
|
||||
break
|
||||
case MT.BYTE_STRING:
|
||||
case MT.UTF8_STRING:
|
||||
this.push('(')
|
||||
break
|
||||
}
|
||||
if (tag === SYMS.STREAM) {
|
||||
this.push('_ ')
|
||||
}
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_on_stop(mt) {
|
||||
switch (mt) {
|
||||
case MT.TAG:
|
||||
this.push(')')
|
||||
break
|
||||
case MT.ARRAY:
|
||||
this.push(']')
|
||||
break
|
||||
case MT.MAP:
|
||||
this.push('}')
|
||||
break
|
||||
case MT.BYTE_STRING:
|
||||
case MT.UTF8_STRING:
|
||||
this.push(')')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
/** @private */
|
||||
_on_data() {
|
||||
this.push(this.separator)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Diagnose
|
||||

node_modules/cbor/lib/encoder.js (generated, vendored, normal file, +1101)

File diff suppressed because it is too large.

node_modules/cbor/lib/map.js (generated, vendored, normal file, +182)

@@ -0,0 +1,182 @@
'use strict'
|
||||
|
||||
const {Buffer} = require('buffer')
|
||||
const encoder = require('./encoder')
|
||||
const decoder = require('./decoder')
|
||||
const {MT} = require('./constants')
|
||||
|
||||
/**
|
||||
* Wrapper around a JavaScript Map object that allows the keys to be
|
||||
* any complex type. The base Map object allows this, but will only
|
||||
* compare the keys by identity, not by value. CborMap translates keys
|
||||
* to CBOR first (and base64's them to ensure by-value comparison).
|
||||
*
|
||||
* This is not a subclass of Object, because it would be tough to get
|
||||
* the semantics to be an exact match.
|
||||
*
|
||||
* @extends Map
|
||||
*/
|
||||
class CborMap extends Map {
|
||||
/**
|
||||
* Creates an instance of CborMap.
|
||||
*
|
||||
* @param {Iterable<any>} [iterable] An Array or other iterable
|
||||
* object whose elements are key-value pairs (arrays with two elements, e.g.
|
||||
* <code>[[ 1, 'one' ],[ 2, 'two' ]]</code>). Each key-value pair is added
|
||||
* to the new CborMap; null values are treated as undefined.
|
||||
*/
|
||||
constructor(iterable) {
|
||||
super(iterable)
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
static _encode(key) {
|
||||
return encoder.encodeCanonical(key).toString('base64')
|
||||
}
|
||||
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
static _decode(key) {
|
||||
return decoder.decodeFirstSync(key, 'base64')
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a specified element.
|
||||
*
|
||||
* @param {any} key The key identifying the element to retrieve.
|
||||
* Can be any type, which will be serialized into CBOR and compared by
|
||||
* value.
|
||||
* @returns {any} The element if it exists, or <code>undefined</code>.
|
||||
*/
|
||||
get(key) {
|
||||
return super.get(CborMap._encode(key))
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds or updates an element with a specified key and value.
|
||||
*
|
||||
* @param {any} key The key identifying the element to store.
|
||||
* Can be any type, which will be serialized into CBOR and compared by
|
||||
* value.
|
||||
* @param {any} val The element to store.
|
||||
* @returns {this} This object.
|
||||
*/
|
||||
set(key, val) {
|
||||
return super.set(CborMap._encode(key), val)
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the specified element.
|
||||
*
|
||||
* @param {any} key The key identifying the element to delete. Can be any
|
||||
* type, which will be serialized into CBOR and compared by value.
|
||||
* @returns {boolean} True if an element in the Map object existed and has
|
||||
* been removed, or false if the element does not exist.
|
||||
*/
|
||||
delete(key) {
|
||||
return super.delete(CborMap._encode(key))
|
||||
}
|
||||
|
||||
/**
|
||||
* Does an element with the specified key exist?
|
||||
*
|
||||
* @param {any} key The key identifying the element to check.
|
||||
* Can be any type, which will be serialized into CBOR and compared by
|
||||
* value.
|
||||
* @returns {boolean} True if an element with the specified key exists in
|
||||
* the Map object; otherwise false.
|
||||
*/
|
||||
has(key) {
|
||||
return super.has(CborMap._encode(key))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new Iterator object that contains the keys for each element
|
||||
* in the Map object in insertion order. The keys are decoded into their
|
||||
* original format.
|
||||
*
|
||||
* @yields {any} The keys of the map.
|
||||
*/
|
||||
*keys() {
|
||||
for (const k of super.keys()) {
|
||||
yield CborMap._decode(k)
|
||||
}
|
||||
}
|
||||
|
||||
/* eslint-disable jsdoc/require-returns-check */
|
||||
/**
|
||||
* Returns a new Iterator object that contains the [key, value] pairs for
|
||||
* each element in the Map object in insertion order.
|
||||
*
|
||||
* @yields {any[]} Key value pairs.
|
||||
* @returns {IterableIterator<any, any>} Key value pairs.
|
||||
*/
|
||||
*entries() {
|
||||
for (const kv of super.entries()) {
|
||||
yield [CborMap._decode(kv[0]), kv[1]]
|
||||
}
|
||||
}
|
||||
/* eslint-enable jsdoc/require-returns-check */
|
||||
|
||||
/**
|
||||
* Returns a new Iterator object that contains the [key, value] pairs for
|
||||
* each element in the Map object in insertion order.
|
||||
*
|
||||
* @returns {IterableIterator} Key value pairs.
|
||||
*/
|
||||
[Symbol.iterator]() {
|
||||
return this.entries()
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once per each key/value pair in the Map
|
||||
* object, in insertion order.
|
||||
*
|
||||
* @param {function(any, any, Map): undefined} fun Function to execute for
|
||||
* each element, which takes a value, a key, and the Map being traversed.
|
||||
* @param {any} thisArg Value to use as this when executing callback.
|
||||
* @throws {TypeError} Invalid function.
|
||||
*/
|
||||
forEach(fun, thisArg) {
|
||||
if (typeof fun !== 'function') {
|
||||
throw new TypeError('Must be function')
|
||||
}
|
||||
for (const kv of super.entries()) {
|
||||
fun.call(this, kv[1], CborMap._decode(kv[0]), this)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Push this map onto the CBOR stream.
|
||||
*
|
||||
* @param {object} gen The generator to push onto.
|
||||
* @returns {boolean} True on success.
|
||||
*/
|
||||
encodeCBOR(gen) {
|
||||
if (!gen._pushInt(this.size, MT.MAP)) {
|
||||
return false
|
||||
}
|
||||
if (gen.canonical) {
|
||||
const entries = Array.from(super.entries())
|
||||
.map(kv => [Buffer.from(kv[0], 'base64'), kv[1]])
|
||||
entries.sort((a, b) => a[0].compare(b[0]))
|
||||
for (const kv of entries) {
|
||||
if (!(gen.push(kv[0]) && gen.pushAny(kv[1]))) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (const kv of super.entries()) {
|
||||
if (!(gen.push(Buffer.from(kv[0], 'base64')) && gen.pushAny(kv[1]))) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = CborMap
|
||||
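A small usage sketch of CborMap (exported from the package as `cbor.Map`), showing that structurally equal keys hit the same entry because every key is canonically encoded before the underlying Map sees it:

```js
const cbor = require('cbor')

const m = new cbor.Map()
m.set({role: 'admin'}, 1)            // the key is serialized to canonical CBOR internally
console.log(m.get({role: 'admin'}))  // 1 — a different but value-equal object finds the entry
console.log(m.has([1, 2, 3]))        // false

// encodeCBOR() above lets the whole map be written by the encoder:
const bytes = cbor.encodeCanonical(m)
console.log(bytes.toString('hex'))
```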
104
node_modules/cbor/lib/simple.js
generated
vendored
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
'use strict'
|
||||
|
||||
const {MT, SIMPLE, SYMS} = require('./constants')
|
||||
|
||||
/**
|
||||
* A CBOR Simple Value that does not map onto a known constant.
|
||||
*/
|
||||
class Simple {
|
||||
/**
|
||||
* Creates an instance of Simple.
|
||||
*
|
||||
* @param {number} value The simple value's integer value.
|
||||
*/
|
||||
constructor(value) {
|
||||
if (typeof value !== 'number') {
|
||||
throw new Error(`Invalid Simple type: ${typeof value}`)
|
||||
}
|
||||
if ((value < 0) || (value > 255) || ((value | 0) !== value)) {
|
||||
throw new Error(`value must be a small positive integer: ${value}`)
|
||||
}
|
||||
this.value = value
|
||||
}
|
||||
|
||||
/**
|
||||
* Debug string for simple value.
|
||||
*
|
||||
* @returns {string} Formatted string of `simple(value)`.
|
||||
*/
|
||||
toString() {
|
||||
return `simple(${this.value})`
|
||||
}
|
||||
|
||||
/**
|
||||
* Debug string for simple value.
|
||||
*
|
||||
* @param {number} depth How deep are we?
|
||||
* @param {object} opts Options.
|
||||
* @returns {string} Formatted string of `simple(value)`.
|
||||
*/
|
||||
[Symbol.for('nodejs.util.inspect.custom')](depth, opts) {
|
||||
return `simple(${this.value})`
|
||||
}
|
||||
|
||||
/**
|
||||
* Push the simple value onto the CBOR stream.
|
||||
*
|
||||
* @param {object} gen The generator to push onto.
|
||||
* @returns {boolean} True on success.
|
||||
*/
|
||||
encodeCBOR(gen) {
|
||||
return gen._pushInt(this.value, MT.SIMPLE_FLOAT)
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the given object a Simple?
|
||||
*
|
||||
* @param {any} obj Object to test.
|
||||
* @returns {boolean} Is it Simple?
|
||||
*/
|
||||
static isSimple(obj) {
|
||||
return obj instanceof Simple
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode from the CBOR additional information into a JavaScript value.
|
||||
* If the CBOR item has no parent, return a "safe" symbol instead of
|
||||
* `null` or `undefined`, so that the value can be passed through a
|
||||
* stream in object mode.
|
||||
*
|
||||
* @param {number} val The CBOR additional info to convert.
|
||||
* @param {boolean} [has_parent=true] Does the CBOR item have a parent?
|
||||
* @param {boolean} [parent_indefinite=false] Is the parent element
|
||||
* indefinitely encoded?
|
||||
* @returns {(null|undefined|boolean|symbol|Simple)} The decoded value.
|
||||
* @throws {Error} Invalid BREAK.
|
||||
*/
|
||||
static decode(val, has_parent = true, parent_indefinite = false) {
|
||||
switch (val) {
|
||||
case SIMPLE.FALSE:
|
||||
return false
|
||||
case SIMPLE.TRUE:
|
||||
return true
|
||||
case SIMPLE.NULL:
|
||||
if (has_parent) {
|
||||
return null
|
||||
}
|
||||
return SYMS.NULL
|
||||
case SIMPLE.UNDEFINED:
|
||||
if (has_parent) {
|
||||
return undefined
|
||||
}
|
||||
return SYMS.UNDEFINED
|
||||
case -1:
|
||||
if (!has_parent || !parent_indefinite) {
|
||||
throw new Error('Invalid BREAK')
|
||||
}
|
||||
return SYMS.BREAK
|
||||
default:
|
||||
return new Simple(val)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Simple
|
||||
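A short sketch of how an unassigned simple value round-trips through this class. The hex literal below is an assumption worked out from the encoding rules (major type 7, additional info 24, value 99 gives 0xf8 0x63):

```js
const cbor = require('cbor')

const s = new cbor.Simple(99)
console.log(s.toString())                   // 'simple(99)'
console.log(cbor.encode(s).toString('hex')) // 'f863'

// Decoding bytes that carry an unassigned simple value gives a Simple back:
const decoded = cbor.decodeFirstSync('f863')
console.log(cbor.Simple.isSimple(decoded), decoded.value) // true 99
```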
377
node_modules/cbor/lib/tagged.js
generated
vendored
Normal file
|
|
@ -0,0 +1,377 @@
|
|||
'use strict'
|
||||
|
||||
const constants = require('./constants')
|
||||
const utils = require('./utils')
|
||||
const INTERNAL_JSON = Symbol('INTERNAL_JSON')
|
||||
|
||||
function setBuffersToJSON(obj, fn) {
|
||||
// The data item tagged can be a byte string or any other data item. In the
|
||||
// latter case, the tag applies to all of the byte string data items
|
||||
// contained in the data item, except for those contained in a nested data
|
||||
// item tagged with an expected conversion.
|
||||
if (utils.isBufferish(obj)) {
|
||||
obj.toJSON = fn
|
||||
} else if (Array.isArray(obj)) {
|
||||
for (const v of obj) {
|
||||
setBuffersToJSON(v, fn)
|
||||
}
|
||||
} else if (obj && (typeof obj === 'object')) {
|
||||
// FFS, complexity in the protocol.
|
||||
|
||||
// There's some circular dependency in here.
|
||||
// eslint-disable-next-line no-use-before-define
|
||||
if (!(obj instanceof Tagged) || (obj.tag < 21) || (obj.tag > 23)) {
|
||||
for (const v of Object.values(obj)) {
|
||||
setBuffersToJSON(v, fn)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function b64this() {
|
||||
// eslint-disable-next-line no-invalid-this
|
||||
return utils.base64(this)
|
||||
}
|
||||
|
||||
function b64urlThis() {
|
||||
// eslint-disable-next-line no-invalid-this
|
||||
return utils.base64url(this)
|
||||
}
|
||||
|
||||
function hexThis() {
|
||||
// eslint-disable-next-line no-invalid-this
|
||||
return this.toString('hex')
|
||||
}
|
||||
|
||||
function swapEndian(ab, size, byteOffset, byteLength) {
|
||||
const dv = new DataView(ab)
|
||||
const [getter, setter] = {
|
||||
2: [dv.getUint16, dv.setUint16],
|
||||
4: [dv.getUint32, dv.setUint32],
|
||||
8: [dv.getBigUint64, dv.setBigUint64],
|
||||
}[size]
|
||||
|
||||
const end = byteOffset + byteLength
|
||||
for (let offset = byteOffset; offset < end; offset += size) {
|
||||
setter.call(dv, offset, getter.call(dv, offset, true))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a tagged value to a more interesting JavaScript type. Errors
|
||||
* thrown in this function will be captured into the "err" property of the
|
||||
* original Tagged instance.
|
||||
*
|
||||
* @callback TagFunction
|
||||
* @param {any} value The value inside the tag.
|
||||
* @param {Tagged} tag The enclosing Tagged instance; useful if you want to
|
||||
* modify it and return it. Also available as "this".
|
||||
* @returns {any} The transformed value.
|
||||
*/
|
||||
|
||||
/* eslint-disable jsdoc/check-types */
|
||||
/**
|
||||
* A mapping from tag number to a tag decoding function.
|
||||
*
|
||||
* @typedef {Object.<string, TagFunction>} TagMap
|
||||
*/
|
||||
/* eslint-enable jsdoc/check-types */
|
||||
|
||||
/**
|
||||
* @type {TagMap}
|
||||
* @private
|
||||
*/
|
||||
const TAGS = {
|
||||
// Standard date/time string; see Section 3.4.1
|
||||
0: v => new Date(v),
|
||||
// Epoch-based date/time; see Section 3.4.2
|
||||
1: v => new Date(v * 1000),
|
||||
// Positive bignum; see Section 3.4.3
|
||||
2: v => utils.bufferToBigInt(v),
|
||||
// Negative bignum; see Section 3.4.3
|
||||
3: v => constants.BI.MINUS_ONE - utils.bufferToBigInt(v),
|
||||
// Expected conversion to base64url encoding; see Section 3.4.5.2
|
||||
21: (v, tag) => {
|
||||
if (utils.isBufferish(v)) {
|
||||
tag[INTERNAL_JSON] = b64urlThis
|
||||
} else {
|
||||
setBuffersToJSON(v, b64urlThis)
|
||||
}
|
||||
return tag
|
||||
},
|
||||
// Expected conversion to base64 encoding; see Section 3.4.5.2
|
||||
22: (v, tag) => {
|
||||
if (utils.isBufferish(v)) {
|
||||
tag[INTERNAL_JSON] = b64this
|
||||
} else {
|
||||
setBuffersToJSON(v, b64this)
|
||||
}
|
||||
return tag
|
||||
},
|
||||
// Expected conversion to base16 encoding; see Section 3.4.5.2
|
||||
23: (v, tag) => {
|
||||
if (utils.isBufferish(v)) {
|
||||
tag[INTERNAL_JSON] = hexThis
|
||||
} else {
|
||||
setBuffersToJSON(v, hexThis)
|
||||
}
|
||||
return tag
|
||||
},
|
||||
// URI; see Section 3.4.5.3
|
||||
32: v => new URL(v),
|
||||
// Base64url; see Section 3.4.5.3
|
||||
33: (v, tag) => {
|
||||
// If any of the following apply:
|
||||
// - the encoded text string contains non-alphabet characters or
|
||||
// only 1 alphabet character in the last block of 4 (where
|
||||
// alphabet is defined by Section 5 of [RFC4648] for tag number 33
|
||||
// and Section 4 of [RFC4648] for tag number 34), or
|
||||
if (!v.match(/^[a-zA-Z0-9_-]+$/)) {
|
||||
throw new Error('Invalid base64url characters')
|
||||
}
|
||||
const last = v.length % 4
|
||||
if (last === 1) {
|
||||
throw new Error('Invalid base64url length')
|
||||
}
|
||||
// - the padding bits in a 2- or 3-character block are not 0, or
|
||||
if (last === 2) {
|
||||
// The last 4 bits of the last character need to be zero.
|
||||
if ('AQgw'.indexOf(v[v.length - 1]) === -1) {
|
||||
throw new Error('Invalid base64 padding')
|
||||
}
|
||||
} else if (last === 3) {
|
||||
// The last 2 bits of the last character need to be zero.
|
||||
if ('AEIMQUYcgkosw048'.indexOf(v[v.length - 1]) === -1) {
|
||||
throw new Error('Invalid base64 padding')
|
||||
}
|
||||
}
|
||||
|
||||
// Or
|
||||
// - the base64url encoding has padding characters,
|
||||
// (caught above)
|
||||
|
||||
// the string is invalid.
|
||||
return tag
|
||||
},
|
||||
// Base64; see Section 3.4.5.3
|
||||
34: (v, tag) => {
|
||||
// If any of the following apply:
|
||||
// - the encoded text string contains non-alphabet characters or
|
||||
// only 1 alphabet character in the last block of 4 (where
|
||||
// alphabet is defined by Section 5 of [RFC4648] for tag number 33
|
||||
// and Section 4 of [RFC4648] for tag number 34), or
|
||||
const m = v.match(/^[a-zA-Z0-9+/]+(?<padding>={0,2})$/)
|
||||
if (!m) {
|
||||
throw new Error('Invalid base64 characters')
|
||||
}
|
||||
if ((v.length % 4) !== 0) {
|
||||
throw new Error('Invalid base64 length')
|
||||
}
|
||||
// - the padding bits in a 2- or 3-character block are not 0, or
|
||||
if (m.groups.padding === '=') {
|
||||
// The last 4 bits of the last character need to be zero.
|
||||
if ('AQgw'.indexOf(v[v.length - 2]) === -1) {
|
||||
throw new Error('Invalid base64 padding')
|
||||
}
|
||||
} else if (m.groups.padding === '==') {
|
||||
// The last 2 bits of the last character need to be zero.
|
||||
if ('AEIMQUYcgkosw048'.indexOf(v[v.length - 3]) === -1) {
|
||||
throw new Error('Invalid base64 padding')
|
||||
}
|
||||
}
|
||||
|
||||
// - the base64 encoding has the wrong number of padding characters,
|
||||
// (caught above)
|
||||
// the string is invalid.
|
||||
return tag
|
||||
},
|
||||
// Regular expression; see Section 2.4.4.3
|
||||
35: v => new RegExp(v),
|
||||
// https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md
|
||||
258: v => new Set(v),
|
||||
}
|
||||
|
||||
const TYPED_ARRAY_TAGS = {
|
||||
64: Uint8Array,
|
||||
65: Uint16Array,
|
||||
66: Uint32Array,
|
||||
// 67: BigUint64Array, Safari doesn't implement
|
||||
68: Uint8ClampedArray,
|
||||
69: Uint16Array,
|
||||
70: Uint32Array,
|
||||
// 71: BigUint64Array, Safari doesn't implement
|
||||
72: Int8Array,
|
||||
73: Int16Array,
|
||||
74: Int32Array,
|
||||
// 75: BigInt64Array, Safari doesn't implement
|
||||
// 76: reserved
|
||||
77: Int16Array,
|
||||
78: Int32Array,
|
||||
// 79: BigInt64Array, Safari doesn't implement
|
||||
// 80: not implemented, float16 array
|
||||
81: Float32Array,
|
||||
82: Float64Array,
|
||||
// 83: not implemented, float128 array
|
||||
// 84: not implemented, float16 array
|
||||
85: Float32Array,
|
||||
86: Float64Array,
|
||||
// 87: not implemented, float128 array
|
||||
}
|
||||
|
||||
// Safari
|
||||
if (typeof BigUint64Array !== 'undefined') {
|
||||
TYPED_ARRAY_TAGS[67] = BigUint64Array
|
||||
TYPED_ARRAY_TAGS[71] = BigUint64Array
|
||||
}
|
||||
if (typeof BigInt64Array !== 'undefined') {
|
||||
TYPED_ARRAY_TAGS[75] = BigInt64Array
|
||||
TYPED_ARRAY_TAGS[79] = BigInt64Array
|
||||
}
|
||||
|
||||
function _toTypedArray(val, tagged) {
|
||||
if (!utils.isBufferish(val)) {
|
||||
throw new TypeError('val not a buffer')
|
||||
}
|
||||
const {tag} = tagged
|
||||
// See https://tools.ietf.org/html/rfc8746
|
||||
const TypedClass = TYPED_ARRAY_TAGS[tag]
|
||||
if (!TypedClass) {
|
||||
throw new Error(`Invalid typed array tag: ${tag}`)
|
||||
}
|
||||
const little = tag & 0b00000100
|
||||
const float = (tag & 0b00010000) >> 4
|
||||
const sz = 2 ** (float + (tag & 0b00000011))
|
||||
|
||||
if ((!little !== utils.isBigEndian()) && (sz > 1)) {
|
||||
swapEndian(val.buffer, sz, val.byteOffset, val.byteLength)
|
||||
}
|
||||
|
||||
const ab = val.buffer.slice(val.byteOffset, val.byteOffset + val.byteLength)
|
||||
return new TypedClass(ab)
|
||||
}
|
||||
|
||||
for (const n of Object.keys(TYPED_ARRAY_TAGS)) {
|
||||
TAGS[n] = _toTypedArray
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {TagMap}
|
||||
* @private
|
||||
*/
|
||||
let current_TAGS = {}
|
||||
|
||||
/**
|
||||
* A CBOR tagged item, where the tag does not have semantics specified at the
|
||||
* moment, or those semantics threw an error during parsing. Typically this will
|
||||
* be an extension point you're not yet expecting.
|
||||
*/
|
||||
class Tagged {
|
||||
/**
|
||||
* Creates an instance of Tagged.
|
||||
*
|
||||
* @param {number} tag The number of the tag.
|
||||
* @param {any} value The value inside the tag.
|
||||
* @param {Error} [err] The error that was thrown parsing the tag, or null.
|
||||
*/
|
||||
constructor(tag, value, err) {
|
||||
this.tag = tag
|
||||
this.value = value
|
||||
this.err = err
|
||||
if (typeof this.tag !== 'number') {
|
||||
throw new Error(`Invalid tag type (${typeof this.tag})`)
|
||||
}
|
||||
if ((this.tag < 0) || ((this.tag | 0) !== this.tag)) {
|
||||
throw new Error(`Tag must be a positive integer: ${this.tag}`)
|
||||
}
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
if (this[INTERNAL_JSON]) {
|
||||
return this[INTERNAL_JSON].call(this.value)
|
||||
}
|
||||
const ret = {
|
||||
tag: this.tag,
|
||||
value: this.value,
|
||||
}
|
||||
if (this.err) {
|
||||
ret.err = this.err
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert to a String.
|
||||
*
|
||||
* @returns {string} String of the form '1(2)'.
|
||||
*/
|
||||
toString() {
|
||||
return `${this.tag}(${JSON.stringify(this.value)})`
|
||||
}
|
||||
|
||||
/**
|
||||
* Push this tagged value onto the CBOR stream.
|
||||
*
|
||||
* @param {object} gen The generator to push onto.
|
||||
* @returns {boolean} True on success.
|
||||
*/
|
||||
encodeCBOR(gen) {
|
||||
gen._pushTag(this.tag)
|
||||
return gen.pushAny(this.value)
|
||||
}
|
||||
|
||||
/**
|
||||
* If we have a converter for this type, do the conversion. Some converters
|
||||
* are built-in. Additional ones can be passed in. If you want to remove
|
||||
* a built-in converter, pass a converter in whose value is 'null' instead
|
||||
* of a function.
|
||||
*
|
||||
* @param {object} converters Keys in the object are a tag number, the value
|
||||
* is a function that takes the decoded CBOR and returns a JavaScript value
|
||||
* of the appropriate type. Throw an exception in the function on errors.
|
||||
* @returns {any} The converted item.
|
||||
*/
|
||||
convert(converters) {
|
||||
let f = (converters == null) ? undefined : converters[this.tag]
|
||||
if (typeof f !== 'function') {
|
||||
f = Tagged.TAGS[this.tag]
|
||||
if (typeof f !== 'function') {
|
||||
return this
|
||||
}
|
||||
}
|
||||
try {
|
||||
return f.call(this, this.value, this)
|
||||
} catch (error) {
|
||||
if (error && error.message && (error.message.length > 0)) {
|
||||
this.err = error.message
|
||||
} else {
|
||||
this.err = error
|
||||
}
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The current set of supported tags. May be modified by plugins.
|
||||
*
|
||||
* @type {TagMap}
|
||||
* @static
|
||||
*/
|
||||
static get TAGS() {
|
||||
return current_TAGS
|
||||
}
|
||||
|
||||
static set TAGS(val) {
|
||||
current_TAGS = val
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the supported tags to the original set, before any plugins modified
|
||||
* the list.
|
||||
*/
|
||||
static reset() {
|
||||
Tagged.TAGS = {...TAGS}
|
||||
}
|
||||
}
|
||||
Tagged.INTERNAL_JSON = INTERNAL_JSON
|
||||
Tagged.reset()
|
||||
module.exports = Tagged
|
||||
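A sketch of the two extension points described above: per-call converters passed to the decoder, and the process-wide `Tagged.TAGS` registry. The tag number 100 and the hex bytes are assumptions used only for illustration (0xd8 0x64 0x01 is tag 100 wrapping the integer 1):

```js
const cbor = require('cbor')

// Without a converter, an unknown tag surfaces as a Tagged instance.
const raw = cbor.decodeFirstSync('d86401')
console.log(raw instanceof cbor.Tagged, raw.tag, raw.value) // true 100 1

// Per-call converter via decoder options:
const converted = cbor.decodeFirstSync('d86401', {
  tags: {100: v => `tag-100:${v}`},
})
console.log(converted) // 'tag-100:1'

// Or register it globally (undo with cbor.Tagged.reset()):
cbor.Tagged.TAGS[100] = v => `tag-100:${v}`
```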
306
node_modules/cbor/lib/utils.js
generated
vendored
Normal file
|
|
@ -0,0 +1,306 @@
|
|||
'use strict'
|
||||
|
||||
const {Buffer} = require('buffer')
|
||||
const NoFilter = require('nofilter')
|
||||
const stream = require('stream')
|
||||
const constants = require('./constants')
|
||||
const {NUMBYTES, SHIFT32, BI, SYMS} = constants
|
||||
const MAX_SAFE_HIGH = 0x1fffff
|
||||
|
||||
/**
|
||||
* Convert a UTF8-encoded Buffer to a JS string. If possible, throw an error
|
||||
* on invalid UTF8. Byte Order Marks are not looked at or stripped.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
const td = new TextDecoder('utf8', {fatal: true, ignoreBOM: true})
|
||||
exports.utf8 = buf => td.decode(buf)
|
||||
exports.utf8.checksUTF8 = true
|
||||
|
||||
function isReadable(s) {
|
||||
// Is this a readable stream? In the webpack version, instanceof isn't
|
||||
// working correctly.
|
||||
if (s instanceof stream.Readable) {
|
||||
return true
|
||||
}
|
||||
return ['read', 'on', 'pipe'].every(f => typeof s[f] === 'function')
|
||||
}
|
||||
|
||||
exports.isBufferish = function isBufferish(b) {
|
||||
return b &&
|
||||
(typeof b === 'object') &&
|
||||
((Buffer.isBuffer(b)) ||
|
||||
(b instanceof Uint8Array) ||
|
||||
(b instanceof Uint8ClampedArray) ||
|
||||
(b instanceof ArrayBuffer) ||
|
||||
(b instanceof DataView))
|
||||
}
|
||||
|
||||
exports.bufferishToBuffer = function bufferishToBuffer(b) {
|
||||
if (Buffer.isBuffer(b)) {
|
||||
return b
|
||||
} else if (ArrayBuffer.isView(b)) {
|
||||
return Buffer.from(b.buffer, b.byteOffset, b.byteLength)
|
||||
} else if (b instanceof ArrayBuffer) {
|
||||
return Buffer.from(b)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
exports.parseCBORint = function parseCBORint(ai, buf) {
|
||||
switch (ai) {
|
||||
case NUMBYTES.ONE:
|
||||
return buf.readUInt8(0)
|
||||
case NUMBYTES.TWO:
|
||||
return buf.readUInt16BE(0)
|
||||
case NUMBYTES.FOUR:
|
||||
return buf.readUInt32BE(0)
|
||||
case NUMBYTES.EIGHT: {
|
||||
const f = buf.readUInt32BE(0)
|
||||
const g = buf.readUInt32BE(4)
|
||||
if (f > MAX_SAFE_HIGH) {
|
||||
return (BigInt(f) * BI.SHIFT32) + BigInt(g)
|
||||
}
|
||||
return (f * SHIFT32) + g
|
||||
}
|
||||
default:
|
||||
throw new Error(`Invalid additional info for int: ${ai}`)
|
||||
}
|
||||
}
|
||||
|
||||
exports.writeHalf = function writeHalf(buf, half) {
|
||||
// Assume 0, -0, NaN, Infinity, and -Infinity have already been caught
|
||||
|
||||
// HACK: everyone settle in. This isn't going to be pretty.
|
||||
// Translate cn-cbor's C code (from Carsten Bormann):
|
||||
|
||||
// uint32_t be32;
|
||||
// uint16_t be16, u16;
|
||||
// union {
|
||||
// float f;
|
||||
// uint32_t u;
|
||||
// } u32;
|
||||
// u32.f = float_val;
|
||||
|
||||
const u32 = Buffer.allocUnsafe(4)
|
||||
u32.writeFloatBE(half, 0)
|
||||
const u = u32.readUInt32BE(0)
|
||||
|
||||
// If ((u32.u & 0x1FFF) == 0) { /* worth trying half */
|
||||
|
||||
// hildjj: If the lower 13 bits aren't 0,
|
||||
// we will lose precision in the conversion.
|
||||
// mant32 = 24bits, mant16 = 11bits, 24-11 = 13
|
||||
if ((u & 0x1FFF) !== 0) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Sign, exponent, mantissa
|
||||
// int s16 = (u32.u >> 16) & 0x8000;
|
||||
// int exp = (u32.u >> 23) & 0xff;
|
||||
// int mant = u32.u & 0x7fffff;
|
||||
|
||||
let s16 = (u >> 16) & 0x8000 // Top bit is sign
|
||||
const exp = (u >> 23) & 0xff // Then 5 bits of exponent
|
||||
const mant = u & 0x7fffff
|
||||
|
||||
// Hildjj: zeros already handled. Assert if you don't believe me.
|
||||
// if (exp == 0 && mant == 0)
|
||||
// ; /* 0.0, -0.0 */
|
||||
|
||||
// else if (exp >= 113 && exp <= 142) /* normalized */
|
||||
// s16 += ((exp - 112) << 10) + (mant >> 13);
|
||||
|
||||
if ((exp >= 113) && (exp <= 142)) {
|
||||
s16 += ((exp - 112) << 10) + (mant >> 13)
|
||||
} else if ((exp >= 103) && (exp < 113)) {
|
||||
// Denormalized numbers
|
||||
// else if (exp >= 103 && exp < 113) { /* denorm, exp16 = 0 */
|
||||
// if (mant & ((1 << (126 - exp)) - 1))
|
||||
// goto float32; /* loss of precision */
|
||||
// s16 += ((mant + 0x800000) >> (126 - exp));
|
||||
|
||||
if (mant & ((1 << (126 - exp)) - 1)) {
|
||||
return false
|
||||
}
|
||||
s16 += ((mant + 0x800000) >> (126 - exp))
|
||||
} else {
|
||||
// } else if (exp == 255 && mant == 0) { /* Inf */
|
||||
// s16 += 0x7c00;
|
||||
|
||||
// hildjj: Infinity already handled
|
||||
|
||||
// } else
|
||||
// goto float32; /* loss of range */
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Done
|
||||
// ensure_writable(3);
|
||||
// u16 = s16;
|
||||
// be16 = hton16p((const uint8_t*)&u16);
|
||||
buf.writeUInt16BE(s16)
|
||||
return true
|
||||
}
|
||||
|
||||
exports.parseHalf = function parseHalf(buf) {
|
||||
const sign = buf[0] & 0x80 ? -1 : 1
|
||||
const exp = (buf[0] & 0x7C) >> 2
|
||||
const mant = ((buf[0] & 0x03) << 8) | buf[1]
|
||||
if (!exp) {
|
||||
return sign * 5.9604644775390625e-8 * mant
|
||||
} else if (exp === 0x1f) {
|
||||
return sign * (mant ? NaN : Infinity)
|
||||
}
|
||||
return sign * (2 ** (exp - 25)) * (1024 + mant)
|
||||
}
|
||||
|
||||
exports.parseCBORfloat = function parseCBORfloat(buf) {
|
||||
switch (buf.length) {
|
||||
case 2:
|
||||
return exports.parseHalf(buf)
|
||||
case 4:
|
||||
return buf.readFloatBE(0)
|
||||
case 8:
|
||||
return buf.readDoubleBE(0)
|
||||
default:
|
||||
throw new Error(`Invalid float size: ${buf.length}`)
|
||||
}
|
||||
}
|
||||
|
||||
exports.hex = function hex(s) {
|
||||
return Buffer.from(s.replace(/^0x/, ''), 'hex')
|
||||
}
|
||||
|
||||
exports.bin = function bin(s) {
|
||||
s = s.replace(/\s/g, '')
|
||||
let start = 0
|
||||
let end = (s.length % 8) || 8
|
||||
const chunks = []
|
||||
while (end <= s.length) {
|
||||
chunks.push(parseInt(s.slice(start, end), 2))
|
||||
start = end
|
||||
end += 8
|
||||
}
|
||||
return Buffer.from(chunks)
|
||||
}
|
||||
|
||||
exports.arrayEqual = function arrayEqual(a, b) {
|
||||
if ((a == null) && (b == null)) {
|
||||
return true
|
||||
}
|
||||
if ((a == null) || (b == null)) {
|
||||
return false
|
||||
}
|
||||
return (a.length === b.length) && a.every((elem, i) => elem === b[i])
|
||||
}
|
||||
|
||||
exports.bufferToBigInt = function bufferToBigInt(buf) {
|
||||
return BigInt(`0x${buf.toString('hex')}`)
|
||||
}
|
||||
|
||||
exports.cborValueToString = function cborValueToString(val, float_bytes = -1) {
|
||||
switch (typeof val) {
|
||||
case 'symbol': {
|
||||
switch (val) {
|
||||
case SYMS.NULL:
|
||||
return 'null'
|
||||
case SYMS.UNDEFINED:
|
||||
return 'undefined'
|
||||
case SYMS.BREAK:
|
||||
return 'BREAK'
|
||||
}
|
||||
// Impossible in node 10
|
||||
/* istanbul ignore if */
|
||||
if (val.description) {
|
||||
return val.description
|
||||
}
|
||||
// On node10, Symbol doesn't have description. Parse it out of the
|
||||
// toString value, which looks like `Symbol(foo)`.
|
||||
const s = val.toString()
|
||||
const m = s.match(/^Symbol\((?<name>.*)\)/)
|
||||
/* istanbul ignore if */
|
||||
if (m && m.groups.name) {
|
||||
// Impossible in node 12+
|
||||
/* istanbul ignore next */
|
||||
return m.groups.name
|
||||
}
|
||||
return 'Symbol'
|
||||
}
|
||||
case 'string':
|
||||
return JSON.stringify(val)
|
||||
case 'bigint':
|
||||
return val.toString()
|
||||
case 'number': {
|
||||
const s = Object.is(val, -0) ? '-0' : String(val)
|
||||
return (float_bytes > 0) ? `${s}_${float_bytes}` : s
|
||||
}
|
||||
case 'object': {
|
||||
// A null should be caught above
|
||||
const buf = exports.bufferishToBuffer(val)
|
||||
if (buf) {
|
||||
const hex = buf.toString('hex')
|
||||
return (float_bytes === -Infinity) ? hex : `h'${hex}'`
|
||||
}
|
||||
if (typeof val[Symbol.for('nodejs.util.inspect.custom')] === 'function') {
|
||||
return val[Symbol.for('nodejs.util.inspect.custom')]()
|
||||
}
|
||||
// Shouldn't get non-empty arrays here
|
||||
if (Array.isArray(val)) {
|
||||
return '[]'
|
||||
}
|
||||
// This should be all that is left
|
||||
return '{}'
|
||||
}
|
||||
}
|
||||
return String(val)
|
||||
}
|
||||
|
||||
exports.guessEncoding = function guessEncoding(input, encoding) {
|
||||
if (typeof input === 'string') {
|
||||
return new NoFilter(input, (encoding == null) ? 'hex' : encoding)
|
||||
}
|
||||
const buf = exports.bufferishToBuffer(input)
|
||||
if (buf) {
|
||||
return new NoFilter(buf)
|
||||
}
|
||||
if (isReadable(input)) {
|
||||
return input
|
||||
}
|
||||
throw new Error('Unknown input type')
|
||||
}
|
||||
|
||||
const B64URL_SWAPS = {
|
||||
'=': '',
|
||||
'+': '-',
|
||||
'/': '_',
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer|Uint8Array|Uint8ClampedArray|ArrayBuffer|DataView} buf
|
||||
* Buffer to convert.
|
||||
* @returns {string} Base64url string.
|
||||
* @private
|
||||
*/
|
||||
exports.base64url = function base64url(buf) {
|
||||
return exports.bufferishToBuffer(buf)
|
||||
.toString('base64')
|
||||
.replace(/[=+/]/g, c => B64URL_SWAPS[c])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Buffer|Uint8Array|Uint8ClampedArray|ArrayBuffer|DataView} buf
|
||||
* Buffer to convert.
|
||||
* @returns {string} Base64 string.
|
||||
* @private
|
||||
*/
|
||||
exports.base64 = function base64(buf) {
|
||||
return exports.bufferishToBuffer(buf).toString('base64')
|
||||
}
|
||||
|
||||
exports.isBigEndian = function isBigEndian() {
|
||||
const array = new Uint8Array(4)
|
||||
const view = new Uint32Array(array.buffer)
|
||||
return !((view[0] = 1) & array[0])
|
||||
}
|
||||
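These helpers are internal, but the half-precision logic is easy to check by hand. A sketch, assuming a deep require into `cbor/lib/utils` still resolves for this package layout (it is not part of the documented API):

```js
const {Buffer} = require('buffer')
// Internal module; the path is an assumption based on the file layout above.
const utils = require('cbor/lib/utils')

// 0x3c00 is the IEEE 754 half-precision encoding of 1.0,
// 0xc000 is -2.0, and 0x7c00 is +Infinity.
console.log(utils.parseHalf(Buffer.from([0x3c, 0x00]))) // 1
console.log(utils.parseHalf(Buffer.from([0xc0, 0x00]))) // -2
console.log(utils.parseHalf(Buffer.from([0x7c, 0x00]))) // Infinity

// writeHalf only succeeds when no precision would be lost:
const out = Buffer.alloc(2)
console.log(utils.writeHalf(out, 1.5), out.toString('hex')) // true '3e00'
```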
64
node_modules/cbor/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
{
|
||||
"name": "cbor",
|
||||
"version": "8.1.0",
|
||||
"description": "Encode and parse data in the Concise Binary Object Representation (CBOR) data format (RFC8949).",
|
||||
"main": "./lib/cbor.js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/hildjj/node-cbor.git",
|
||||
"directory": "packages/cbor"
|
||||
},
|
||||
"homepage": "https://github.com/hildjj/node-cbor/tree/main/packages/cbor",
|
||||
"directories": {
|
||||
"lib": "lib"
|
||||
},
|
||||
"browser": {
|
||||
"fs": false
|
||||
},
|
||||
"scripts": {
|
||||
"clean": "rimraf coverage .nyc_output/ docs",
|
||||
"lint": "eslint lib/*.js test/*.js",
|
||||
"coverage": "nyc -r lcov npm test",
|
||||
"test": "ava test/*.ava.js",
|
||||
"release": "npm version patch && git push --follow-tags && npm publish",
|
||||
"predev": "npm run coverage",
|
||||
"dev": "light-server -q -s. -w 'lib/*.js,test/*.js # npm run coverage' -o /coverage/lcov-report/index.html",
|
||||
"types": "tsc"
|
||||
},
|
||||
"keywords": [
|
||||
"coap",
|
||||
"cbor",
|
||||
"json",
|
||||
"rfc7049",
|
||||
"rfc8949"
|
||||
],
|
||||
"author": {
|
||||
"name": "Joe Hildebrand",
|
||||
"email": "joe-github@cursive.net"
|
||||
},
|
||||
"contributors": [
|
||||
"Patrick Gansterer <paroga@paroga.com> (http://paroga.com/)",
|
||||
"Artyom Yagilev <github@scorpi.org> (http://scorpi.org/)",
|
||||
"Denis Lapaev <den@lapaev.me> (http://lapaev.me/)",
|
||||
"Ruben Bridgewater <ruben@bridgewater.de>",
|
||||
"Burt Harris <Burt_Harris_cbor@azxs.33mail.com>",
|
||||
"Jakub Arbet <hi@jakubarbet.me> (https://jakubarbet.me/)"
|
||||
],
|
||||
"types": "./types/lib/cbor.d.ts",
|
||||
"dependencies": {
|
||||
"nofilter": "^3.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "*",
|
||||
"bignumber.js": "^9.0.1",
|
||||
"garbage": "~0.0.0",
|
||||
"p-event": "^4.2.0",
|
||||
"rimraf": "^3.0.2"
|
||||
},
|
||||
"license": "MIT",
|
||||
"readmeFilename": "README.md",
|
||||
"engines": {
|
||||
"node": ">=12.19"
|
||||
},
|
||||
"gitHead": "0b4f6e3fc2c92bda222e0249ec70e59d4d7bca4a"
|
||||
}
|
||||
25
node_modules/cbor/types/lib/cbor.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
export var Commented: typeof import("./commented");
|
||||
export var Diagnose: typeof import("./diagnose");
|
||||
export var Decoder: typeof import("./decoder");
|
||||
export var Encoder: typeof import("./encoder");
|
||||
export var Simple: typeof import("./simple");
|
||||
export var Tagged: typeof import("./tagged");
|
||||
export var Map: typeof import("./map");
|
||||
export namespace leveldb {
|
||||
const decode: typeof import("./decoder").decodeFirstSync;
|
||||
const encode: typeof import("./encoder").encode;
|
||||
const buffer: boolean;
|
||||
const name: string;
|
||||
}
|
||||
export function reset(): void;
|
||||
export var comment: typeof import("./commented").comment;
|
||||
export var decodeAll: typeof import("./decoder").decodeAll;
|
||||
export var decodeAllSync: typeof import("./decoder").decodeAllSync;
|
||||
export var decodeFirst: typeof import("./decoder").decodeFirst;
|
||||
export var decodeFirstSync: typeof import("./decoder").decodeFirstSync;
|
||||
export var decode: typeof import("./decoder").decodeFirstSync;
|
||||
export var diagnose: typeof import("./diagnose").diagnose;
|
||||
export var encode: typeof import("./encoder").encode;
|
||||
export var encodeCanonical: typeof import("./encoder").encodeCanonical;
|
||||
export var encodeOne: typeof import("./encoder").encodeOne;
|
||||
export var encodeAsync: typeof import("./encoder").encodeAsync;
|
||||
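The `leveldb` namespace above is shaped like a levelup/encoding-down codec. A sketch of using it standalone (the exact `name` string is taken from the declaration, not verified here):

```js
const cbor = require('cbor')

// { encode, decode, buffer, name } — decode is decodeFirstSync, encode is encode.
console.log(cbor.leveldb.buffer, cbor.leveldb.name) // true, expected 'cbor'

const buf = cbor.leveldb.encode({a: 1})
console.log(cbor.leveldb.decode(buf)) // { a: 1 }
```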
111
node_modules/cbor/types/lib/commented.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
/// <reference types="node" />
|
||||
export = Commented;
|
||||
/**
|
||||
* Generate the expanded format of RFC 8949, section 3.2.2.
|
||||
*
|
||||
* @extends stream.Transform
|
||||
*/
|
||||
declare class Commented extends stream.Transform {
|
||||
/**
|
||||
* Comment on an input Buffer or string, creating a string passed to the
|
||||
* callback. If callback not specified, a promise is returned.
|
||||
*
|
||||
* @static
|
||||
* @param {string|Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray
|
||||
* |DataView|stream.Readable} input Something to parse.
|
||||
* @param {CommentOptions|commentCallback|string|number} [options={}]
|
||||
* Encoding, max_depth, or callback.
|
||||
* @param {commentCallback} [cb] If specified, called on completion.
|
||||
* @returns {Promise} If cb not specified.
|
||||
* @throws {Error} Input required.
|
||||
*/
|
||||
static comment(input: string | Buffer | ArrayBuffer | Uint8Array | Uint8ClampedArray | DataView | stream.Readable, options?: CommentOptions | commentCallback | string | number, cb?: commentCallback): Promise<any>;
|
||||
/**
|
||||
* Create a CBOR commenter.
|
||||
*
|
||||
* @param {CommentOptions} [options={}] Stream options.
|
||||
*/
|
||||
constructor(options?: CommentOptions);
|
||||
depth: number;
|
||||
max_depth: number;
|
||||
all: NoFilter;
|
||||
parser: Decoder;
|
||||
/**
|
||||
* @param {Buffer} v Descend into embedded CBOR.
|
||||
* @private
|
||||
*/
|
||||
private _tag_24;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_error(er: any): void;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_read(buf: any): void;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_more(mt: any, len: any, parent_mt: any, pos: any): void;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_start_string(mt: any, len: any, parent_mt: any, pos: any): void;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_start(mt: any, tag: any, parent_mt: any, pos: any): void;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_stop(mt: any): void;
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
private _on_value;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_data(): void;
|
||||
}
|
||||
declare namespace Commented {
|
||||
export { CommentOptions, commentCallback };
|
||||
}
|
||||
import stream = require("stream");
|
||||
import NoFilter = require("nofilter");
|
||||
import Decoder = require("./decoder");
|
||||
import { Buffer } from "buffer";
|
||||
type CommentOptions = {
|
||||
/**
|
||||
* How many times to indent
|
||||
* the dashes.
|
||||
*/
|
||||
max_depth?: number;
|
||||
/**
|
||||
* Initial indentation depth.
|
||||
*/
|
||||
depth?: number;
|
||||
/**
|
||||
* If true, omit the summary
|
||||
* of the full bytes read at the end.
|
||||
*/
|
||||
no_summary?: boolean;
|
||||
/**
|
||||
* Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
*/
|
||||
tags?: object;
|
||||
/**
|
||||
* If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
*/
|
||||
preferWeb?: boolean;
|
||||
/**
|
||||
* Encoding to use for input, if it
|
||||
* is a string.
|
||||
*/
|
||||
encoding?: BufferEncoding;
|
||||
};
|
||||
type commentCallback = (error?: Error, commented?: string) => void;
|
||||
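A sketch of the commented output these types describe; the exact formatting of the annotation column is up to the library:

```js
const cbor = require('cbor')

// 0xa1 0x01 0x02 is the map {1: 2}; comment() resolves to a multi-line,
// human-readable breakdown of every byte.
cbor.comment('a10102').then(text => console.log(text))
```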
63
node_modules/cbor/types/lib/constants.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
export namespace MT {
|
||||
const POS_INT: number;
|
||||
const NEG_INT: number;
|
||||
const BYTE_STRING: number;
|
||||
const UTF8_STRING: number;
|
||||
const ARRAY: number;
|
||||
const MAP: number;
|
||||
const TAG: number;
|
||||
const SIMPLE_FLOAT: number;
|
||||
}
|
||||
export type MT = number;
|
||||
export namespace TAG {
|
||||
const DATE_STRING: number;
|
||||
const DATE_EPOCH: number;
|
||||
const POS_BIGINT: number;
|
||||
const NEG_BIGINT: number;
|
||||
const DECIMAL_FRAC: number;
|
||||
const BIGFLOAT: number;
|
||||
const BASE64URL_EXPECTED: number;
|
||||
const BASE64_EXPECTED: number;
|
||||
const BASE16_EXPECTED: number;
|
||||
const CBOR: number;
|
||||
const URI: number;
|
||||
const BASE64URL: number;
|
||||
const BASE64: number;
|
||||
const REGEXP: number;
|
||||
const MIME: number;
|
||||
const SET: number;
|
||||
}
|
||||
export type TAG = number;
|
||||
export namespace NUMBYTES {
|
||||
const ZERO: number;
|
||||
const ONE: number;
|
||||
const TWO: number;
|
||||
const FOUR: number;
|
||||
const EIGHT: number;
|
||||
const INDEFINITE: number;
|
||||
}
|
||||
export type NUMBYTES = number;
|
||||
export namespace SIMPLE {
|
||||
const FALSE: number;
|
||||
const TRUE: number;
|
||||
const NULL: number;
|
||||
const UNDEFINED: number;
|
||||
}
|
||||
export type SIMPLE = number;
|
||||
export namespace SYMS {
|
||||
const NULL_1: symbol;
|
||||
export { NULL_1 as NULL };
|
||||
const UNDEFINED_1: symbol;
|
||||
export { UNDEFINED_1 as UNDEFINED };
|
||||
export const PARENT: symbol;
|
||||
export const BREAK: symbol;
|
||||
export const STREAM: symbol;
|
||||
}
|
||||
export var SHIFT32: number;
|
||||
export namespace BI {
|
||||
const MINUS_ONE: bigint;
|
||||
const NEG_MAX: bigint;
|
||||
const MAXINT32: bigint;
|
||||
const MAXINT64: bigint;
|
||||
const SHIFT32: bigint;
|
||||
}
|
||||
198
node_modules/cbor/types/lib/decoder.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,198 @@
|
|||
/// <reference types="node" />
|
||||
export = Decoder;
|
||||
/**
|
||||
* Decode a stream of CBOR bytes by transforming them into equivalent
|
||||
* JavaScript data. Because of the limitations of Node object streams,
|
||||
* special symbols are emitted instead of NULL or UNDEFINED. Fix those
|
||||
* up by calling {@link Decoder.nullcheck}.
|
||||
*
|
||||
* @extends BinaryParseStream
|
||||
*/
|
||||
declare class Decoder extends BinaryParseStream {
|
||||
/**
|
||||
* Check the given value for a symbol encoding a NULL or UNDEFINED value in
|
||||
* the CBOR stream.
|
||||
*
|
||||
* @static
|
||||
* @param {any} val The value to check.
|
||||
* @returns {any} The corrected value.
|
||||
* @throws {Error} Nothing was found.
|
||||
* @example
|
||||
* myDecoder.on('data', val => {
|
||||
* val = Decoder.nullcheck(val)
|
||||
* // ...
|
||||
* })
|
||||
*/
|
||||
static nullcheck(val: any): any;
|
||||
/**
|
||||
* Decode the first CBOR item in the input, synchronously. This will throw
|
||||
* an exception if the input is not valid CBOR, or if there are more bytes
|
||||
* left over at the end (if options.extendedResults is not true).
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input If a Readable stream, must have
|
||||
* received the `readable` event already, or you will get an error
|
||||
* claiming "Insufficient data".
|
||||
* @param {DecoderOptions|string} [options={}] Options or encoding for input.
|
||||
* @returns {ExtendedResults|any} The decoded value.
|
||||
* @throws {UnexpectedDataError} Data is left over after decoding.
|
||||
* @throws {Error} Insufficient data.
|
||||
*/
|
||||
static decodeFirstSync(input: BufferLike, options?: DecoderOptions | string): ExtendedResults | any;
|
||||
/**
|
||||
* Decode all of the CBOR items in the input into an array. This will throw
|
||||
* an exception if the input is not valid CBOR; a zero-length input will
|
||||
* return an empty array.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|string} [options={}] Options or encoding
|
||||
* for input.
|
||||
* @returns {Array<ExtendedResults>|Array<any>} Array of all found items.
|
||||
* @throws {TypeError} No input provided.
|
||||
* @throws {Error} Insufficient data provided.
|
||||
*/
|
||||
static decodeAllSync(input: BufferLike, options?: DecoderOptions | string): Array<ExtendedResults> | Array<any>;
|
||||
/**
|
||||
* Decode the first CBOR item in the input. This will error if there are
|
||||
* more bytes left over at the end (if options.extendedResults is not true),
|
||||
* and optionally if there were no valid CBOR bytes in the input. Emits the
|
||||
* {Decoder.NOT_FOUND} Symbol in the callback if no data was found and the
|
||||
* `required` option is false.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|decodeCallback|string} [options={}] Options, the
|
||||
* callback, or input encoding.
|
||||
* @param {decodeCallback} [cb] Callback.
|
||||
* @returns {Promise<ExtendedResults|any>} Returned even if callback is
|
||||
* specified.
|
||||
* @throws {TypeError} No input provided.
|
||||
*/
|
||||
static decodeFirst(input: BufferLike, options?: DecoderOptions | decodeCallback | string, cb?: decodeCallback): Promise<ExtendedResults | any>;
|
||||
/**
|
||||
* @callback decodeAllCallback
|
||||
* @param {Error} error If one was generated.
|
||||
* @param {Array<ExtendedResults>|Array<any>} value All of the decoded
|
||||
* values, wrapped in an Array.
|
||||
*/
|
||||
/**
|
||||
* Decode all of the CBOR items in the input. This will error if there are
|
||||
* more bytes left over at the end.
|
||||
*
|
||||
* @static
|
||||
* @param {BufferLike} input What to parse?
|
||||
* @param {DecoderOptions|decodeAllCallback|string} [options={}]
|
||||
* Decoding options, the callback, or the input encoding.
|
||||
* @param {decodeAllCallback} [cb] Callback.
|
||||
* @returns {Promise<Array<ExtendedResults>|Array<any>>} Even if callback
|
||||
* is specified.
|
||||
* @throws {TypeError} No input specified.
|
||||
*/
|
||||
static decodeAll(input: BufferLike, options?: string | DecoderOptions | ((error: Error, value: Array<ExtendedResults> | Array<any>) => any), cb?: (error: Error, value: Array<ExtendedResults> | Array<any>) => any): Promise<Array<ExtendedResults> | Array<any>>;
|
||||
/**
|
||||
* Create a parsing stream.
|
||||
*
|
||||
* @param {DecoderOptions} [options={}] Options.
|
||||
*/
|
||||
constructor(options?: DecoderOptions);
|
||||
running: boolean;
|
||||
max_depth: number;
|
||||
tags: {
|
||||
[x: string]: Tagged.TagFunction;
|
||||
};
|
||||
preferWeb: boolean;
|
||||
extendedResults: boolean;
|
||||
required: boolean;
|
||||
preventDuplicateKeys: boolean;
|
||||
valueBytes: NoFilter;
|
||||
/**
|
||||
* Stop processing.
|
||||
*/
|
||||
close(): void;
|
||||
/**
|
||||
* Only called if extendedResults is true.
|
||||
*
|
||||
* @ignore
|
||||
*/
|
||||
_onRead(data: any): void;
|
||||
}
|
||||
declare namespace Decoder {
|
||||
export { NOT_FOUND, BufferLike, ExtendedResults, DecoderOptions, decodeCallback };
|
||||
}
|
||||
import BinaryParseStream = require("../vendor/binary-parse-stream");
|
||||
import Tagged = require("./tagged");
|
||||
import NoFilter = require("nofilter");
|
||||
/**
|
||||
* Things that can act as inputs, from which a NoFilter can be created.
|
||||
*/
|
||||
type BufferLike = string | Buffer | ArrayBuffer | Uint8Array | Uint8ClampedArray | DataView | stream.Readable;
|
||||
type DecoderOptions = {
|
||||
/**
|
||||
* The maximum depth to parse.
|
||||
* Use -1 for "until you run out of memory". Set this to a finite
|
||||
* positive number for un-trusted inputs. Most standard inputs won't nest
|
||||
* more than 100 or so levels; I've tested into the millions before
|
||||
* running out of memory.
|
||||
*/
|
||||
max_depth?: number;
|
||||
/**
|
||||
* Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
*/
|
||||
tags?: Tagged.TagMap;
|
||||
/**
|
||||
* If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
*/
|
||||
preferWeb?: boolean;
|
||||
/**
|
||||
* The encoding of the input.
|
||||
* Ignored if input is a Buffer.
|
||||
*/
|
||||
encoding?: BufferEncoding;
|
||||
/**
|
||||
* Should an error be thrown when no
|
||||
* data is in the input?
|
||||
*/
|
||||
required?: boolean;
|
||||
/**
|
||||
* If true, emit extended
|
||||
* results, which will be an object with shape {@link ExtendedResults }.
|
||||
* The value will already have been null-checked.
|
||||
*/
|
||||
extendedResults?: boolean;
|
||||
/**
|
||||
* If true, error is
|
||||
* thrown if a map has duplicate keys.
|
||||
*/
|
||||
preventDuplicateKeys?: boolean;
|
||||
};
|
||||
type ExtendedResults = {
|
||||
/**
|
||||
* The value that was found.
|
||||
*/
|
||||
value: any;
|
||||
/**
|
||||
* The number of bytes of the original input that
|
||||
* were read.
|
||||
*/
|
||||
length: number;
|
||||
/**
|
||||
* The bytes of the original input that were used
|
||||
* to produce the value.
|
||||
*/
|
||||
bytes: Buffer;
|
||||
/**
|
||||
* The bytes that were left over from the original
|
||||
* input. This property only exists if {@link Decoder.decodeFirst } or
|
||||
* {@link Decoder.decodeFirstSync } was called.
|
||||
*/
|
||||
unused?: Buffer;
|
||||
};
|
||||
type decodeCallback = (error?: Error, value?: any) => void;
|
||||
declare const NOT_FOUND: unique symbol;
|
||||
import { Buffer } from "buffer";
|
||||
import stream = require("stream");
|
||||
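A sketch of how a couple of these decoder options behave in practice; the two-byte hex input is an assumption (0x02 followed by a stray 0x03):

```js
const cbor = require('cbor')

// Plain decodeFirstSync throws if bytes are left over...
try {
  cbor.decodeFirstSync('0203')
} catch (e) {
  console.log(e.message) // leftover-data error
}

// ...while extendedResults reports what was consumed and what was not.
const res = cbor.decodeFirstSync('0203', {extendedResults: true})
console.log(res.value, res.length) // 2 1
console.log(res.unused)            // the leftover 0x03 byte(s)
```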
91
node_modules/cbor/types/lib/diagnose.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
export = Diagnose;
|
||||
/**
|
||||
* Output the diagnostic format from a stream of CBOR bytes.
|
||||
*
|
||||
* @extends stream.Transform
|
||||
*/
|
||||
declare class Diagnose extends stream.Transform {
|
||||
/**
|
||||
* Convenience function to return a string in diagnostic format.
|
||||
*
|
||||
* @param {BufferLike} input The CBOR bytes to format.
|
||||
* @param {DiagnoseOptions |diagnoseCallback|string} [options={}]
|
||||
* Options, the callback, or the input encoding.
|
||||
* @param {diagnoseCallback} [cb] Callback.
|
||||
* @throws {TypeError} Input not provided.
|
||||
* @returns {Promise} If callback not specified.
|
||||
*/
|
||||
static diagnose(input: BufferLike, options?: DiagnoseOptions | diagnoseCallback | string, cb?: diagnoseCallback): Promise<any>;
|
||||
/**
|
||||
* Creates an instance of Diagnose.
|
||||
*
|
||||
* @param {DiagnoseOptions} [options={}] Options for creation.
|
||||
*/
|
||||
constructor(options?: DiagnoseOptions);
|
||||
float_bytes: number;
|
||||
separator: string;
|
||||
stream_errors: boolean;
|
||||
parser: Decoder;
|
||||
/**
|
||||
* @ignore
|
||||
*/
|
||||
_on_error(er: any): void;
|
||||
/** @private */
|
||||
private _on_more;
|
||||
/** @private */
|
||||
private _fore;
|
||||
/** @private */
|
||||
private _on_value;
|
||||
/** @private */
|
||||
private _on_start;
|
||||
/** @private */
|
||||
private _on_stop;
|
||||
/** @private */
|
||||
private _on_data;
|
||||
}
|
||||
declare namespace Diagnose {
|
||||
export { BufferLike, DiagnoseOptions, diagnoseCallback };
|
||||
}
|
||||
import stream = require("stream");
|
||||
import Decoder = require("./decoder");
|
||||
/**
|
||||
* Things that can act as inputs, from which a NoFilter can be created.
|
||||
*/
|
||||
type BufferLike = string | Buffer | ArrayBuffer | Uint8Array | Uint8ClampedArray | DataView | stream.Readable;
|
||||
type DiagnoseOptions = {
|
||||
/**
|
||||
* Output between detected objects.
|
||||
*/
|
||||
separator?: string;
|
||||
/**
|
||||
* Put error info into the
|
||||
* output stream.
|
||||
*/
|
||||
stream_errors?: boolean;
|
||||
/**
|
||||
* The maximum depth to parse.
|
||||
* Use -1 for "until you run out of memory". Set this to a finite
|
||||
* positive number for un-trusted inputs. Most standard inputs won't nest
|
||||
* more than 100 or so levels; I've tested into the millions before
|
||||
* running out of memory.
|
||||
*/
|
||||
max_depth?: number;
|
||||
/**
|
||||
* Mapping from tag number to function(v),
|
||||
* where v is the decoded value that comes after the tag, and where the
|
||||
* function returns the correctly-created value for that tag.
|
||||
*/
|
||||
tags?: object;
|
||||
/**
|
||||
* If true, prefer Uint8Arrays to
|
||||
* be generated instead of node Buffers. This might turn on some more
|
||||
* changes in the future, so forward-compatibility is not guaranteed yet.
|
||||
*/
|
||||
preferWeb?: boolean;
|
||||
/**
|
||||
* The encoding of input, ignored if
|
||||
* input is not string.
|
||||
*/
|
||||
encoding?: BufferEncoding;
|
||||
};
|
||||
type diagnoseCallback = (error?: Error, value?: string) => void;
|
||||
463
node_modules/cbor/types/lib/encoder.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,463 @@
|
|||
/// <reference types="node" />
|
||||
export = Encoder;
|
||||
/**
|
||||
* @typedef EncodingOptions
|
||||
* @property {any[]|object} [genTypes=[]] Array of pairs of
|
||||
* `type`, `function(Encoder)` for semantic types to be encoded. Not
|
||||
* needed for Array, Date, Buffer, Map, RegExp, Set, or URL.
|
||||
* If an object, the keys are the constructor names for the types.
|
||||
* @property {boolean} [canonical=false] Should the output be
|
||||
* canonicalized.
|
||||
* @property {boolean|WeakSet} [detectLoops=false] Should object loops
|
||||
* be detected? This will currently add memory to track every part of the
|
||||
* object being encoded in a WeakSet. Do not encode
|
||||
* the same object twice on the same encoder, without calling
|
||||
* `removeLoopDetectors` in between, which will clear the WeakSet.
|
||||
* You may pass in your own WeakSet to be used; this is useful in some
|
||||
* recursive scenarios.
|
||||
* @property {("number"|"float"|"int"|"string")} [dateType="number"] -
|
||||
* how should dates be encoded? "number" means float or int, if no
|
||||
* fractional seconds.
|
||||
* @property {any} [encodeUndefined=undefined] How should an
|
||||
* "undefined" in the input be encoded. By default, just encode a CBOR
|
||||
* undefined. If this is a buffer, use those bytes without re-encoding
|
||||
* them. If this is a function, the function will be called (which is a
|
||||
* good time to throw an exception, if that's what you want), and the
|
||||
* return value will be used according to these rules. Anything else will
|
||||
* be encoded as CBOR.
|
||||
* @property {boolean} [disallowUndefinedKeys=false] Should
|
||||
* "undefined" be disallowed as a key in a Map that is serialized? If
|
||||
* this is true, encode(new Map([[undefined, 1]])) will throw an
|
||||
* exception. Note that it is impossible to get a key of undefined in a
|
||||
* normal JS object.
|
||||
* @property {boolean} [collapseBigIntegers=false] Should integers
|
||||
* that come in as ECMAscript bigint's be encoded
|
||||
* as normal CBOR integers if they fit, discarding type information?
|
||||
* @property {number} [chunkSize=4096] Number of characters or bytes
|
||||
* for each chunk, if obj is a string or Buffer, when indefinite encoding.
|
||||
* @property {boolean} [omitUndefinedProperties=false] When encoding
|
||||
* objects or Maps, do not include a key if its corresponding value is
|
||||
* `undefined`.
|
||||
*/
|
||||
/**
|
||||
* Transform JavaScript values into CBOR bytes. The `Writable` side of
|
||||
* the stream is in object mode.
|
||||
*
|
||||
* @extends stream.Transform
|
||||
*/
|
||||
declare class Encoder extends stream.Transform {
|
||||
/**
|
||||
* Encode an array and all of its elements.
|
||||
*
|
     * @param {Encoder} gen Encoder to use.
     * @param {any[]} obj Array to encode.
     * @param {object} [opts] Options.
     * @param {boolean} [opts.indefinite=false] Use indefinite encoding?
     * @returns {boolean} True on success.
     */
    static pushArray(gen: Encoder, obj: any[], opts?: {
        indefinite?: boolean;
    }): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Date} obj Date to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushDate(gen: Encoder, obj: Date): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Buffer} obj Buffer to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushBuffer(gen: Encoder, obj: Buffer): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {NoFilter} obj Buffer to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushNoFilter(gen: Encoder, obj: NoFilter): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {RegExp} obj RegExp to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushRegexp(gen: Encoder, obj: RegExp): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Set} obj Set to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushSet(gen: Encoder, obj: Set<any>): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {URL} obj URL to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushURL(gen: Encoder, obj: URL): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {object} obj Boxed String, Number, or Boolean object to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushBoxed(gen: Encoder, obj: object): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {Map} obj Map to encode.
     * @returns {boolean} True on success.
     * @throws {Error} Map key that is undefined.
     * @ignore
     */
    static _pushMap(gen: Encoder, obj: Map<any, any>, opts: any): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {NodeJS.TypedArray} obj Array to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushTypedArray(gen: Encoder, obj: NodeJS.TypedArray): boolean;
    /**
     * @param {Encoder} gen Encoder.
     * @param {ArrayBuffer} obj ArrayBuffer to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    static _pushArrayBuffer(gen: Encoder, obj: ArrayBuffer): boolean;
    /**
     * Encode the given object with indefinite length. There are apparently
     * some (IMO) broken implementations of poorly-specified protocols that
     * REQUIRE indefinite-encoding. See the example for how to add this as an
     * `encodeCBOR` function to an object or class to get indefinite encoding.
     *
     * @param {Encoder} gen The encoder to use.
     * @param {string|Buffer|Array|Map|object} [obj] The object to encode. If
     * null, use "this" instead.
     * @param {EncodingOptions} [options={}] Options for encoding.
     * @returns {boolean} True on success.
     * @throws {Error} No object to encode or invalid indefinite encoding.
     * @example <caption>Force indefinite encoding:</caption>
     * const o = {
     *   a: true,
     *   encodeCBOR: cbor.Encoder.encodeIndefinite,
     * }
     * const m = []
     * m.encodeCBOR = cbor.Encoder.encodeIndefinite
     * cbor.encodeOne([o, m])
     */
    static encodeIndefinite(gen: Encoder, obj?: string | Buffer | any[] | Map<any, any> | object, options?: EncodingOptions): boolean;
    /**
     * Encode one or more JavaScript objects, and return a Buffer containing the
     * CBOR bytes.
     *
     * @param {...any} objs The objects to encode.
     * @returns {Buffer} The encoded objects.
     */
    static encode(...objs: any[]): Buffer;
    /**
     * Encode one or more JavaScript objects canonically (slower!), and return
     * a Buffer containing the CBOR bytes.
     *
     * @param {...any} objs The objects to encode.
     * @returns {Buffer} The encoded objects.
     */
    static encodeCanonical(...objs: any[]): Buffer;
    /**
     * Encode one JavaScript object using the given options.
     *
     * @static
     * @param {any} obj The object to encode.
     * @param {EncodingOptions} [options={}] Passed to the Encoder constructor.
     * @returns {Buffer} The encoded objects.
     */
    static encodeOne(obj: any, options?: EncodingOptions): Buffer;
    /**
     * Encode one JavaScript object using the given options in a way that
     * is more resilient to objects being larger than the highWaterMark
     * number of bytes. As with the other static encode functions, this
     * will still use a large amount of memory. Use a stream-based approach
     * directly if you need to process large and complicated inputs.
     *
     * @param {any} obj The object to encode.
     * @param {EncodingOptions} [options={}] Passed to the Encoder constructor.
     * @returns {Promise<Buffer>} A promise for the encoded buffer.
     */
    static encodeAsync(obj: any, options?: EncodingOptions): Promise<Buffer>;
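As a quick orientation to the static helpers above, here is a minimal usage sketch. It assumes the package entry point re-exports this class as `cbor.Encoder`, which is how the `encodeIndefinite` example above refers to it:

```js
const cbor = require('cbor')

// One-shot encode to a Buffer.
const plain = cbor.Encoder.encodeOne({b: 2, a: 1})

// Canonical encoding sorts map keys and prefers the shortest forms (slower).
const canon = cbor.Encoder.encodeCanonical({b: 2, a: 1})

// encodeAsync resolves with the same bytes, but copes better with outputs
// larger than the stream's highWaterMark.
cbor.Encoder.encodeAsync({b: 2, a: 1}).then(buf => {
  console.log(plain.length, canon.length, buf.length)
})
```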
    static set SEMANTIC_TYPES(arg: {
        [x: string]: EncodeFunction;
    });
    /**
     * The currently supported set of semantic types. May be modified by plugins.
     *
     * @type {SemanticMap}
     */
    static get SEMANTIC_TYPES(): {
        [x: string]: EncodeFunction;
    };
    /**
     * Reset the supported semantic types to the original set, before any
     * plugins modified the list.
     */
    static reset(): void;
    /**
     * Creates an instance of Encoder.
     *
     * @param {EncodingOptions} [options={}] Options for the encoder.
     */
    constructor(options?: EncodingOptions);
    canonical: boolean;
    encodeUndefined: any;
    disallowUndefinedKeys: boolean;
    dateType: "string" | "number" | "float" | "int";
    collapseBigIntegers: boolean;
    /** @type {WeakSet?} */
    detectLoops: WeakSet<any> | null;
    omitUndefinedProperties: boolean;
    semanticTypes: {
        [x: string]: EncodeFunction;
    };
    /**
     * @param {number} val Number (0-255) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt8(val: number): boolean;
    /**
     * @param {number} val Number (0-65535) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt16BE(val: number): boolean;
    /**
     * @param {number} val Number (0..2**32-1) to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUInt32BE(val: number): boolean;
    /**
     * @param {number} val Number to encode as a 4-byte float.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushFloatBE(val: number): boolean;
    /**
     * @param {number} val Number to encode as an 8-byte double.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushDoubleBE(val: number): boolean;
    /**
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNaN(): boolean;
    /**
     * @param {number} obj Positive or negative infinity.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushInfinity(obj: number): boolean;
    /**
     * Choose the best float representation for a number and encode it.
     *
     * @param {number} obj A number that is known to be non-integer, but not
     * how many bytes of precision it needs.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushFloat(obj: number): boolean;
    /**
     * Choose the best integer representation for a positive number and encode
     * it. If the number is over MAX_SAFE_INTEGER, fall back on float (but I
     * don't remember why).
     *
     * @param {number} obj A positive number that is known to be an integer,
     * but not how many bytes of precision it needs.
     * @param {number} mt The Major Type number to combine with the integer.
     * Not yet shifted.
     * @param {number} [orig] The number before it was transformed to positive.
     * If the mt is NEG_INT, and the positive number is over MAX_SAFE_INT,
     * then we'll encode this as a float rather than making the number
     * negative again and losing precision.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushInt(obj: number, mt: number, orig?: number): boolean;
    /**
     * Choose the best integer representation for a number and encode it.
     *
     * @param {number} obj A number that is known to be an integer,
     * but not how many bytes of precision it needs.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushIntNum(obj: number): boolean;
    /**
     * @param {number} obj Plain JS number to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNumber(obj: number): boolean;
    /**
     * @param {string} obj String to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushString(obj: string): boolean;
    /**
     * @param {boolean} obj Bool to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushBoolean(obj: boolean): boolean;
    /**
     * @param {undefined} obj Ignored.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushUndefined(obj: undefined): boolean;
    /**
     * @param {null} obj Ignored.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushNull(obj: null): boolean;
    /**
     * @param {number} tag Tag number to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushTag(tag: number): boolean;
    /**
     * @param {bigint} obj BigInt to encode.
     * @returns {boolean} True on success.
     * @ignore
     */
    _pushJSBigint(obj: bigint): boolean;
    /**
     * @param {object} obj Object to encode.
     * @returns {boolean} True on success.
     * @throws {Error} Loop detected.
     * @ignore
     */
    _pushObject(obj: object, opts: any): boolean;
    /**
     * @param {any[]} objs Array of supported things.
     * @returns {Buffer} Concatenation of encodings for the supported things.
     * @ignore
     */
    _encodeAll(objs: any[]): Buffer;
    /**
     * Add an encoding function to the list of supported semantic types. This
     * is useful for objects for which you can't add an encodeCBOR method.
     *
     * @param {string|Function} type The type to encode.
     * @param {EncodeFunction} fun The encoder to use.
     * @returns {EncodeFunction?} The previous encoder, or undefined if there
     * wasn't one.
     * @throws {TypeError} Invalid function.
     */
    addSemanticType(type: string | Function, fun: EncodeFunction): EncodeFunction | null;
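A sketch of `addSemanticType` in use. The `Point` class and its `[x, y]` wire layout are invented for illustration, and `cbor.Encoder` is assumed to be the package export of this class:

```js
const cbor = require('cbor')

// A third-party class we can't modify to add an encodeCBOR() method.
class Point {
  constructor(x, y) {
    this.x = x
    this.y = y
  }
}

const enc = new cbor.Encoder()
// Encode every Point written to this encoder as the two-element array [x, y].
enc.addSemanticType(Point, (e, p) => e.pushAny([p.x, p.y]))

enc.on('data', buf => console.log(buf.toString('hex'))) // expect 820102 for [1, 2]
enc.write(new Point(1, 2))
```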
    /**
     * Push any supported type onto the encoded stream.
     *
     * @param {any} obj The thing to encode.
     * @returns {boolean} True on success.
     * @throws {TypeError} Unknown type for obj.
     */
    pushAny(obj: any): boolean;
    /**
     * Remove the loop detector WeakSet for this Encoder.
     *
     * @returns {boolean} True when the Encoder was reset, else false.
     */
    removeLoopDetectors(): boolean;
}
declare namespace Encoder {
    export { EncodeFunction, SemanticMap, EncodingOptions };
}
import stream = require("stream");
/**
 * Generate the CBOR for a value. If you are using this, you'll either need
 * to call {@link Encoder.write} with a Buffer, or look into the internals of
 * Encoder to reuse existing non-documented behavior.
 */
type EncodeFunction = (enc: Encoder, val: any) => boolean;
import { Buffer } from "buffer";
import NoFilter = require("nofilter");
type EncodingOptions = {
    /**
     * Array of pairs of `type`, `function(Encoder)` for semantic types to be
     * encoded. Not needed for Array, Date, Buffer, Map, RegExp, Set, or URL.
     * If an object, the keys are the constructor names for the types.
     */
    genTypes?: any[] | object;
    /**
     * Should the output be canonicalized?
     */
    canonical?: boolean;
    /**
     * Should object loops be detected? This will currently add memory to
     * track every part of the object being encoded in a WeakSet. Do not
     * encode the same object twice on the same encoder without calling
     * `removeLoopDetectors` in between, which will clear the WeakSet.
     * You may pass in your own WeakSet to be used; this is useful in some
     * recursive scenarios.
     */
    detectLoops?: boolean | WeakSet<any>;
    /**
     * How should dates be encoded? "number" means float or int, if there are
     * no fractional seconds.
     */
    dateType?: ("number" | "float" | "int" | "string");
    /**
     * How should an "undefined" in the input be encoded? By default, just
     * encode a CBOR undefined. If this is a buffer, use those bytes without
     * re-encoding them. If this is a function, the function will be called
     * (which is a good time to throw an exception, if that's what you want),
     * and the return value will be used according to these rules. Anything
     * else will be encoded as CBOR.
     */
    encodeUndefined?: any;
    /**
     * Should "undefined" be disallowed as a key in a Map that is serialized?
     * If this is true, encode(new Map([[undefined, 1]])) will throw an
     * exception. Note that it is impossible to get a key of undefined in a
     * normal JS object.
     */
    disallowUndefinedKeys?: boolean;
    /**
     * Should integers that come in as ECMAScript bigints be encoded
     * as normal CBOR integers if they fit, discarding type information?
     */
    collapseBigIntegers?: boolean;
    /**
     * Number of characters or bytes for each chunk, if obj is a string or
     * Buffer, when using indefinite encoding.
     */
    chunkSize?: number;
    /**
     * When encoding objects or Maps, do not include a key if its
     * corresponding value is `undefined`.
     */
    omitUndefinedProperties?: boolean;
};
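The options above combine roughly like this; the values chosen are illustrative only:

```js
const cbor = require('cbor')

const buf = cbor.Encoder.encodeOne(
  {when: new Date(0), n: 2n, a: 1, b: undefined},
  {
    canonical: true,               // Deterministic key order and preferred forms.
    dateType: 'int',               // Dates as integer epoch time instead of strings.
    collapseBigIntegers: true,     // 2n becomes a plain CBOR integer.
    omitUndefinedProperties: true, // The "b" key is dropped entirely.
    detectLoops: new WeakSet(),    // Supply your own WeakSet if you encode repeatedly.
  }
)
```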
/**
 * A mapping from a type name to an encoding function for that type.
 */
type SemanticMap = {
    [x: string]: EncodeFunction;
};
38
node_modules/cbor/types/lib/map.d.ts
generated
vendored
Normal file
@ -0,0 +1,38 @@
export = CborMap;
/**
 * Wrapper around a JavaScript Map object that allows the keys to be
 * any complex type. The base Map object allows this, but will only
 * compare the keys by identity, not by value. CborMap translates keys
 * to CBOR first (and Base64-encodes them to ensure by-value comparison).
 *
 * This is not a subclass of Object, because it would be tough to get
 * the semantics to be an exact match.
 *
 * @extends Map
 */
declare class CborMap extends Map<any, any> {
    /**
     * @ignore
     */
    static _encode(key: any): string;
    /**
     * @ignore
     */
    static _decode(key: any): any;
    /**
     * Creates an instance of CborMap.
     *
     * @param {Iterable<any>} [iterable] An Array or other iterable
     * object whose elements are key-value pairs (arrays with two elements, e.g.
     * <code>[[ 1, 'one' ],[ 2, 'two' ]]</code>). Each key-value pair is added
     * to the new CborMap; null values are treated as undefined.
     */
    constructor(iterable?: Iterable<any>);
    /**
     * Push this Map onto the CBOR stream.
     *
     * @param {object} gen The generator to push onto.
     * @returns {boolean} True on success.
     */
    encodeCBOR(gen: object): boolean;
}
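A usage sketch, assuming the package exposes this class as `cbor.Map` (the implementation lives in `lib/map.js`):

```js
const cbor = require('cbor')

const m = new cbor.Map([[[1, 2], 'array key']])
m.set({a: 1}, 'object key')

// Keys are compared by encoded value, not by identity, so a structurally
// equal key finds the same entry.
console.log(m.get([1, 2]))  // 'array key'
console.log(m.get({a: 1}))  // 'object key'

const bytes = cbor.encode(m) // A CBOR map whose keys are an array and a map.
```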
47
node_modules/cbor/types/lib/simple.d.ts
generated
vendored
Normal file
@ -0,0 +1,47 @@
export = Simple;
/**
 * A CBOR Simple Value that does not map onto a known constant.
 */
declare class Simple {
    /**
     * Is the given object a Simple?
     *
     * @param {any} obj Object to test.
     * @returns {boolean} Is it Simple?
     */
    static isSimple(obj: any): boolean;
    /**
     * Decode from the CBOR additional information into a JavaScript value.
     * If the CBOR item has no parent, return a "safe" symbol instead of
     * `null` or `undefined`, so that the value can be passed through a
     * stream in object mode.
     *
     * @param {number} val The CBOR additional info to convert.
     * @param {boolean} [has_parent=true] Does the CBOR item have a parent?
     * @param {boolean} [parent_indefinite=false] Is the parent element
     * indefinitely encoded?
     * @returns {(null|undefined|boolean|symbol|Simple)} The decoded value.
     * @throws {Error} Invalid BREAK.
     */
    static decode(val: number, has_parent?: boolean, parent_indefinite?: boolean): (null | undefined | boolean | symbol | Simple);
    /**
     * Creates an instance of Simple.
     *
     * @param {number} value The simple value's integer value.
     */
    constructor(value: number);
    value: number;
    /**
     * Debug string for simple value.
     *
     * @returns {string} Formatted string of `simple(value)`.
     */
    toString(): string;
    /**
     * Push the simple value onto the CBOR stream.
     *
     * @param {object} gen The generator to push onto.
     * @returns {boolean} True on success.
     */
    encodeCBOR(gen: object): boolean;
}
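A small sketch of working with simple values, assuming the class is exported as `cbor.Simple`:

```js
const cbor = require('cbor')

const s = new cbor.Simple(99)         // A simple value with no assigned meaning.
console.log(cbor.Simple.isSimple(s))  // true
console.log(s.toString())             // 'simple(99)'
console.log(cbor.encode(s))           // <Buffer f8 63>: major type 7, one-byte value 99
```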
78
node_modules/cbor/types/lib/tagged.d.ts
generated
vendored
Normal file
@ -0,0 +1,78 @@
export = Tagged;
/**
 * A CBOR tagged item, where the tag does not have semantics specified at the
 * moment, or those semantics threw an error during parsing. Typically this will
 * be an extension point you're not yet expecting.
 */
declare class Tagged {
    static set TAGS(arg: {
        [x: string]: TagFunction;
    });
    /**
     * The current set of supported tags. May be modified by plugins.
     *
     * @type {TagMap}
     * @static
     */
    static get TAGS(): {
        [x: string]: TagFunction;
    };
    /**
     * Reset the supported tags to the original set, before any plugins modified
     * the list.
     */
    static reset(): void;
    /**
     * Creates an instance of Tagged.
     *
     * @param {number} tag The number of the tag.
     * @param {any} value The value inside the tag.
     * @param {Error} [err] The error that was thrown parsing the tag, or null.
     */
    constructor(tag: number, value: any, err?: Error);
    tag: number;
    value: any;
    err: Error;
    toJSON(): any;
    /**
     * Convert to a String.
     *
     * @returns {string} String of the form '1(2)'.
     */
    toString(): string;
    /**
     * Push the tagged value onto the CBOR stream.
     *
     * @param {object} gen The generator to push onto.
     * @returns {boolean} True on success.
     */
    encodeCBOR(gen: object): boolean;
    /**
     * If we have a converter for this type, do the conversion. Some converters
     * are built-in. Additional ones can be passed in. If you want to remove
     * a built-in converter, pass a converter in whose value is 'null' instead
     * of a function.
     *
     * @param {object} converters Keys in the object are a tag number, the value
     * is a function that takes the decoded CBOR and returns a JavaScript value
     * of the appropriate type. Throw an exception in the function on errors.
     * @returns {any} The converted item.
     */
    convert(converters: object): any;
}
declare namespace Tagged {
    export { INTERNAL_JSON, TagFunction, TagMap };
}
/**
 * Convert a tagged value to a more interesting JavaScript type. Errors
 * thrown in this function will be captured into the "err" property of the
 * original Tagged instance.
 */
type TagFunction = (value: any, tag: Tagged) => any;
declare const INTERNAL_JSON: unique symbol;
/**
 * A mapping from tag number to a tag decoding function.
 */
type TagMap = {
    [x: string]: TagFunction;
};
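A sketch of handling an unknown tag. Tag number 64000 and the converter are invented for illustration, and the class is assumed to be exported as `cbor.Tagged`:

```js
const cbor = require('cbor')

// A tag with no registered semantics stays wrapped in a Tagged instance.
const t = new cbor.Tagged(64000, 'payload')
console.log(t.toString())   // something like '64000("payload")'
const bytes = cbor.encode(t)

// convert() applies your own converter, keyed by tag number.
const converted = t.convert({64000: value => ({wrapped: value})})
console.log(converted)      // { wrapped: 'payload' }
```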
21
node_modules/cbor/types/lib/utils.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
/// <reference types="node" />
export function utf8(buf: any): string;
export namespace utf8 {
    const checksUTF8: boolean;
}
export function isBufferish(b: any): boolean;
export function bufferishToBuffer(b: any): Buffer;
export function parseCBORint(ai: any, buf: any): any;
export function writeHalf(buf: any, half: any): boolean;
export function parseHalf(buf: any): number;
export function parseCBORfloat(buf: any): any;
export function hex(s: any): Buffer;
export function bin(s: any): Buffer;
export function arrayEqual(a: any, b: any): any;
export function bufferToBigInt(buf: any): bigint;
export function cborValueToString(val: any, float_bytes?: number): any;
export function guessEncoding(input: any, encoding: any): any;
export function base64url(buf: Buffer | Uint8Array | Uint8ClampedArray | ArrayBuffer | DataView): string;
export function base64(buf: Buffer | Uint8Array | Uint8ClampedArray | ArrayBuffer | DataView): string;
export function isBigEndian(): boolean;
import { Buffer } from "buffer";
34
node_modules/cbor/types/vendor/binary-parse-stream/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,34 @@
export = BinaryParseStream;
/**
 * BinaryParseStream is a TransformStream that consumes buffers and outputs
 * objects on the other end. It expects your subclass to implement a `_parse`
 * method that is a generator. When your generator yields a number, it'll be
 * fed a buffer of that length from the input. When your generator returns,
 * the return value will be pushed to the output side.
 *
 * @extends stream.Transform
 */
declare class BinaryParseStream extends stream.Transform {
    /**
     * Creates an instance of BinaryParseStream.
     *
     * @param {stream.TransformOptions} options Stream options.
     * @memberof BinaryParseStream
     */
    constructor(options: stream.TransformOptions);
    bs: NoFilter;
    __fresh: boolean;
    __needed: number;
    /**
     * Subclasses must override this to set their parsing behavior. Yield a
     * number to receive a Buffer of that many bytes.
     *
     * @abstract
     * @returns {Generator<number, undefined, Buffer>}
     */
    _parse(): Generator<number, undefined, Buffer>;
    __restart(): void;
    __parser: Generator<number, undefined, Buffer>;
}
import stream = require("stream");
import NoFilter = require("nofilter");
49
node_modules/cbor/vendor/binary-parse-stream/README.md
generated
vendored
Normal file
@ -0,0 +1,49 @@
# binary-parse-stream

Painless streaming binary protocol parsers using generators.

## Installation

    npm install binary-parse-stream

## Synchronous

This module uses the exact same generator interface as [binary-parse-stream](https://github.com/nathan7/binary-parse-stream), which presents a synchronous interface to a generator parser.

## Usage

```js
const BinaryParseStream = require('binary-parse-stream')
const {One} = BinaryParseStream // -1
```

BinaryParseStream is a TransformStream that consumes buffers and outputs objects on the other end.
It expects your subclass to implement a `_parse` method that is a generator.
When your generator yields a number, it'll be fed a buffer of that length from the input.
If it yields -1, it'll be given the value of the first byte instead of a single-byte buffer.
When your generator returns, the return value will be pushed to the output side.

## Example

The following module parses a protocol that consists of a 32-bit unsigned big-endian type parameter, an unsigned 8-bit length parameter, and a buffer of the specified length.
It outputs `{type, buf}` objects.

```js
class SillyProtocolParseStream extends BinaryParseStream {
  constructor(options) {
    super(options)
    this.count = 0
  }

  *_parse() {
    const type = (yield 4).readUInt32BE(0, true)
    const length = yield -1
    const buf = yield length
    this.count++
    return {type, buf}
  }
}
```

There is also a shorter syntax for when you don't want to explicitly subclass: `BinaryParseStream.extend(function*())`.
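For completeness, a sketch of driving the example parser above with hand-built bytes; the values are arbitrary:

```js
const parser = new SillyProtocolParseStream()
parser.on('data', ({type, buf}) => {
  console.log(type, buf.toString()) // 1 'abc'
})

// 32-bit big-endian type (1), one length byte (3), then 3 payload bytes.
parser.write(Buffer.from([0, 0, 0, 1, 3, 0x61, 0x62, 0x63]))
parser.end()
```

Note that the tweaked copy vendored below removes the `yield -1` byte shortcut (see the comment at the top of its `index.js`), so a `_parse` generator written against that copy must always yield byte counts.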
96
node_modules/cbor/vendor/binary-parse-stream/index.js
generated
vendored
Normal file
@ -0,0 +1,96 @@
// Tweaked version of nathan7's binary-parse-stream
// (see https://github.com/nathan7/binary-parse-stream)
// Uses NoFilter instead of the readable in the original. Removes
// the ability to read -1, which was odd and un-needed.
// License for binary-parse-stream: MIT

// binary-parse-stream is now unmaintained, so I have rewritten it as
// more modern JS so I can get tsc to help check types.

'use strict'
const stream = require('stream')
const NoFilter = require('nofilter')

/**
 * BinaryParseStream is a TransformStream that consumes buffers and outputs
 * objects on the other end. It expects your subclass to implement a `_parse`
 * method that is a generator. When your generator yields a number, it'll be
 * fed a buffer of that length from the input. When your generator returns,
 * the return value will be pushed to the output side.
 *
 * @extends stream.Transform
 */
class BinaryParseStream extends stream.Transform {
  /**
   * Creates an instance of BinaryParseStream.
   *
   * @param {stream.TransformOptions} options Stream options.
   * @memberof BinaryParseStream
   */
  constructor(options) {
    super(options)
    // Doesn't work to pass these in as opts, for some reason
    // also, work around typescript not knowing TransformStream internals
    // eslint-disable-next-line dot-notation
    this['_writableState'].objectMode = false
    // eslint-disable-next-line dot-notation
    this['_readableState'].objectMode = true

    this.bs = new NoFilter()
    this.__restart()
  }

  _transform(fresh, encoding, cb) {
    this.bs.write(fresh)

    while (this.bs.length >= this.__needed) {
      let ret = null
      const chunk = (this.__needed === null) ?
        undefined :
        this.bs.read(this.__needed)

      try {
        ret = this.__parser.next(chunk)
      } catch (e) {
        return cb(e)
      }

      if (this.__needed) {
        this.__fresh = false
      }

      if (ret.done) {
        this.push(ret.value)
        this.__restart()
      } else {
        this.__needed = ret.value || Infinity
      }
    }

    return cb()
  }

  /**
   * Subclasses must override this to set their parsing behavior. Yield a
   * number to receive a Buffer of that many bytes.
   *
   * @abstract
   * @returns {Generator<number, undefined, Buffer>}
   */
  /* istanbul ignore next */
  *_parse() { // eslint-disable-line class-methods-use-this, require-yield
    throw new Error('Must be implemented in subclass')
  }

  __restart() {
    this.__needed = null
    this.__parser = this._parse()
    this.__fresh = true
  }

  _flush(cb) {
    cb(this.__fresh ? null : new Error('unexpected end of input'))
  }
}

module.exports = BinaryParseStream