Bump artifact dependencies if CODEQL_ACTION_ARTIFACT_V2_UPGRADE enabled (#2482)
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
Co-authored-by: Henry Mercer <henrymercer@github.com>
parent cf5b0a9041
commit a196a714b8
5388 changed files with 2176737 additions and 71701 deletions
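For context, the title above describes gating the new artifact dependencies behind the CODEQL_ACTION_ARTIFACT_V2_UPGRADE environment variable. The snippet below is a purely illustrative sketch of that feature-flag pattern; the helper names are assumptions and do not come from this commit.

```js
// Hypothetical sketch only: pick an upload path based on the feature flag.
// uploadWithArtifactV2 / uploadWithArtifactV1 are placeholders, not functions
// from the codeql-action codebase.
async function uploadArtifacts(files, rootDir, name) {
  const useV2 = process.env.CODEQL_ACTION_ARTIFACT_V2_UPGRADE === 'true';
  if (useV2) {
    return uploadWithArtifactV2(files, rootDir, name); // newer dependency stack
  }
  return uploadWithArtifactV1(files, rootDir, name); // existing behavior
}
```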
22  node_modules/archiver-utils/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,22 @@
Copyright (c) 2015 Chris Talkington.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
6  node_modules/archiver-utils/README.md  generated  vendored  Normal file
@@ -0,0 +1,6 @@
# Archiver Utils

## Things of Interest
- [Changelog](https://github.com/archiverjs/archiver-utils/releases)
- [Contributing](https://github.com/archiverjs/archiver-utils/blob/master/CONTRIBUTING.md)
- [MIT License](https://github.com/archiverjs/archiver-utils/blob/master/LICENSE)
209  node_modules/archiver-utils/file.js  generated  vendored  Normal file
@@ -0,0 +1,209 @@
/**
 * archiver-utils
 *
 * Copyright (c) 2012-2014 Chris Talkington, contributors.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
 */
var fs = require('graceful-fs');
var path = require('path');

var flatten = require('lodash/flatten');
var difference = require('lodash/difference');
var union = require('lodash/union');
var isPlainObject = require('lodash/isPlainObject');

var glob = require('glob');

var file = module.exports = {};

var pathSeparatorRe = /[\/\\]/g;

// Process specified wildcard glob patterns or filenames against a
// callback, excluding and uniquing files in the result set.
var processPatterns = function(patterns, fn) {
  // Filepaths to return.
  var result = [];
  // Iterate over flattened patterns array.
  flatten(patterns).forEach(function(pattern) {
    // If the first character is ! it should be omitted
    var exclusion = pattern.indexOf('!') === 0;
    // If the pattern is an exclusion, remove the !
    if (exclusion) { pattern = pattern.slice(1); }
    // Find all matching files for this pattern.
    var matches = fn(pattern);
    if (exclusion) {
      // If an exclusion, remove matching files.
      result = difference(result, matches);
    } else {
      // Otherwise add matching files.
      result = union(result, matches);
    }
  });
  return result;
};

// True if the file path exists.
file.exists = function() {
  var filepath = path.join.apply(path, arguments);
  return fs.existsSync(filepath);
};

// Return an array of all file paths that match the given wildcard patterns.
file.expand = function(...args) {
  // If the first argument is an options object, save those options to pass
  // into the File.prototype.glob.sync method.
  var options = isPlainObject(args[0]) ? args.shift() : {};
  // Use the first argument if it's an Array, otherwise convert the arguments
  // object to an array and use that.
  var patterns = Array.isArray(args[0]) ? args[0] : args;
  // Return empty set if there are no patterns or filepaths.
  if (patterns.length === 0) { return []; }
  // Return all matching filepaths.
  var matches = processPatterns(patterns, function(pattern) {
    // Find all matching files for this pattern.
    return glob.sync(pattern, options);
  });
  // Filter result set?
  if (options.filter) {
    matches = matches.filter(function(filepath) {
      filepath = path.join(options.cwd || '', filepath);
      try {
        if (typeof options.filter === 'function') {
          return options.filter(filepath);
        } else {
          // If the file is of the right type and exists, this should work.
          return fs.statSync(filepath)[options.filter]();
        }
      } catch(e) {
        // Otherwise, it's probably not the right type.
        return false;
      }
    });
  }
  return matches;
};

// Build a multi task "files" object dynamically.
file.expandMapping = function(patterns, destBase, options) {
  options = Object.assign({
    rename: function(destBase, destPath) {
      return path.join(destBase || '', destPath);
    }
  }, options);
  var files = [];
  var fileByDest = {};
  // Find all files matching pattern, using passed-in options.
  file.expand(options, patterns).forEach(function(src) {
    var destPath = src;
    // Flatten?
    if (options.flatten) {
      destPath = path.basename(destPath);
    }
    // Change the extension?
    if (options.ext) {
      destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext);
    }
    // Generate destination filename.
    var dest = options.rename(destBase, destPath, options);
    // Prepend cwd to src path if necessary.
    if (options.cwd) { src = path.join(options.cwd, src); }
    // Normalize filepaths to be unix-style.
    dest = dest.replace(pathSeparatorRe, '/');
    src = src.replace(pathSeparatorRe, '/');
    // Map correct src path to dest path.
    if (fileByDest[dest]) {
      // If dest already exists, push this src onto that dest's src array.
      fileByDest[dest].src.push(src);
    } else {
      // Otherwise create a new src-dest file mapping object.
      files.push({
        src: [src],
        dest: dest,
      });
      // And store a reference for later use.
      fileByDest[dest] = files[files.length - 1];
    }
  });
  return files;
};

// reusing bits of grunt's multi-task source normalization
file.normalizeFilesArray = function(data) {
  var files = [];

  data.forEach(function(obj) {
    var prop;
    if ('src' in obj || 'dest' in obj) {
      files.push(obj);
    }
  });

  if (files.length === 0) {
    return [];
  }

  files = _(files).chain().forEach(function(obj) {
    if (!('src' in obj) || !obj.src) { return; }
    // Normalize .src properties to flattened array.
    if (Array.isArray(obj.src)) {
      obj.src = flatten(obj.src);
    } else {
      obj.src = [obj.src];
    }
  }).map(function(obj) {
    // Build options object, removing unwanted properties.
    var expandOptions = Object.assign({}, obj);
    delete expandOptions.src;
    delete expandOptions.dest;

    // Expand file mappings.
    if (obj.expand) {
      return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
        // Copy obj properties to result.
        var result = Object.assign({}, obj);
        // Make a clone of the orig obj available.
        result.orig = Object.assign({}, obj);
        // Set .src and .dest, processing both as templates.
        result.src = mapObj.src;
        result.dest = mapObj.dest;
        // Remove unwanted properties.
        ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
          delete result[prop];
        });
        return result;
      });
    }

    // Copy obj properties to result, adding an .orig property.
    var result = Object.assign({}, obj);
    // Make a clone of the orig obj available.
    result.orig = Object.assign({}, obj);

    if ('src' in result) {
      // Expose an expand-on-demand getter method as .src.
      Object.defineProperty(result, 'src', {
        enumerable: true,
        get: function fn() {
          var src;
          if (!('result' in fn)) {
            src = obj.src;
            // If src is an array, flatten it. Otherwise, make it into an array.
            src = Array.isArray(src) ? flatten(src) : [src];
            // Expand src files, memoizing result.
            fn.result = file.expand(expandOptions, src);
          }
          return fn.result;
        }
      });
    }

    if ('dest' in result) {
      result.dest = obj.dest;
    }

    return result;
  }).flatten().value();

  return files;
};
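As a quick orientation (not part of the diff), here is a small usage sketch of the file.js helpers added above, which index.js below re-exports as `.file`; the paths and patterns are invented for illustration.

```js
// Illustrative sketch based on the file.js shown above (paths are invented).
var file = require('archiver-utils').file;

// Expand glob patterns into matching paths. A leading options object is
// forwarded to glob.sync; patterns starting with '!' act as exclusions, and
// a string `filter` names an fs.Stats method used to keep only matching entries.
var sources = file.expand({ filter: 'isFile' }, ['src/**/*.js', '!src/**/*.test.js']);

// Build Grunt-style src -> dest mappings; `flatten` drops directory prefixes.
var mappings = file.expandMapping(['src/**/*.js'], 'build/', { flatten: true });
console.log(sources.length, mappings.length);
```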
155  node_modules/archiver-utils/index.js  generated  vendored  Normal file
@@ -0,0 +1,155 @@
/**
 * archiver-utils
 *
 * Copyright (c) 2015 Chris Talkington.
 * Licensed under the MIT license.
 * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE
 */
var fs = require('graceful-fs');
var path = require('path');
var isStream = require('is-stream');
var lazystream = require('lazystream');
var normalizePath = require('normalize-path');
var defaults = require('lodash/defaults');

var Stream = require('stream').Stream;
var PassThrough = require('readable-stream').PassThrough;

var utils = module.exports = {};
utils.file = require('./file.js');

utils.collectStream = function(source, callback) {
  var collection = [];
  var size = 0;

  source.on('error', callback);

  source.on('data', function(chunk) {
    collection.push(chunk);
    size += chunk.length;
  });

  source.on('end', function() {
    var buf = Buffer.alloc(size);
    var offset = 0;

    collection.forEach(function(data) {
      data.copy(buf, offset);
      offset += data.length;
    });

    callback(null, buf);
  });
};

utils.dateify = function(dateish) {
  dateish = dateish || new Date();

  if (dateish instanceof Date) {
    dateish = dateish;
  } else if (typeof dateish === 'string') {
    dateish = new Date(dateish);
  } else {
    dateish = new Date();
  }

  return dateish;
};

// this is slightly different from lodash version
utils.defaults = function(object, source, guard) {
  var args = arguments;
  args[0] = args[0] || {};

  return defaults(...args);
};

utils.isStream = function(source) {
  return isStream(source);
};

utils.lazyReadStream = function(filepath) {
  return new lazystream.Readable(function() {
    return fs.createReadStream(filepath);
  });
};

utils.normalizeInputSource = function(source) {
  if (source === null) {
    return Buffer.alloc(0);
  } else if (typeof source === 'string') {
    return Buffer.from(source);
  } else if (utils.isStream(source)) {
    // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing,
    // since it will only be processed in a (distant) future iteration of the event loop, and will lose
    // data if already flowing now.
    return source.pipe(new PassThrough());
  }

  return source;
};

utils.sanitizePath = function(filepath) {
  return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, '');
};

utils.trailingSlashIt = function(str) {
  return str.slice(-1) !== '/' ? str + '/' : str;
};

utils.unixifyPath = function(filepath) {
  return normalizePath(filepath, false).replace(/^\w+:/, '');
};

utils.walkdir = function(dirpath, base, callback) {
  var results = [];

  if (typeof base === 'function') {
    callback = base;
    base = dirpath;
  }

  fs.readdir(dirpath, function(err, list) {
    var i = 0;
    var file;
    var filepath;

    if (err) {
      return callback(err);
    }

    (function next() {
      file = list[i++];

      if (!file) {
        return callback(null, results);
      }

      filepath = path.join(dirpath, file);

      fs.stat(filepath, function(err, stats) {
        results.push({
          path: filepath,
          relative: path.relative(base, filepath).replace(/\\/g, '/'),
          stats: stats
        });

        if (stats && stats.isDirectory()) {
          utils.walkdir(filepath, base, function(err, res) {
            if(err){
              return callback(err);
            }

            res.forEach(function(dirEntry) {
              results.push(dirEntry);
            });

            next();
          });
        } else {
          next();
        }
      });
    })();
  });
};
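Again for orientation only, a brief sketch exercising a few of the index.js helpers above; the file name is a placeholder.

```js
// Illustrative sketch based on the index.js shown above.
var fs = require('fs');
var utils = require('archiver-utils');

// Strip drive prefixes and leading '../' or '/' segments before archiving.
utils.sanitizePath('..\\..\\etc/passwd'); //=> 'etc/passwd'
utils.trailingSlashIt('some/dir');        //=> 'some/dir/'

// Buffer a whole stream into memory.
utils.collectStream(fs.createReadStream('example.txt'), function(err, buf) {
  if (err) throw err;
  console.log('collected %d bytes', buf.length);
});
```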
79  node_modules/archiver-utils/node_modules/is-stream/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,79 @@
import * as stream from 'stream';

declare const isStream: {
  /**
  @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream).

  @example
  ```
  import * as fs from 'fs';
  import isStream = require('is-stream');

  isStream(fs.createReadStream('unicorn.png'));
  //=> true

  isStream({});
  //=> false
  ```
  */
  (stream: unknown): stream is stream.Stream;

  /**
  @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable).

  @example
  ```
  import * as fs from 'fs';
  import isStream = require('is-stream');

  isStream.writable(fs.createWriteStream('unicorn.txt'));
  //=> true
  ```
  */
  writable(stream: unknown): stream is stream.Writable;

  /**
  @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable).

  @example
  ```
  import * as fs from 'fs';
  import isStream = require('is-stream');

  isStream.readable(fs.createReadStream('unicorn.png'));
  //=> true
  ```
  */
  readable(stream: unknown): stream is stream.Readable;

  /**
  @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex).

  @example
  ```
  import {Duplex} from 'stream';
  import isStream = require('is-stream');

  isStream.duplex(new Duplex());
  //=> true
  ```
  */
  duplex(stream: unknown): stream is stream.Duplex;

  /**
  @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform).

  @example
  ```
  import * as fs from 'fs';
  import Stringify = require('streaming-json-stringify');
  import isStream = require('is-stream');

  isStream.transform(Stringify());
  //=> true
  ```
  */
  transform(input: unknown): input is stream.Transform;
};

export = isStream;
28  node_modules/archiver-utils/node_modules/is-stream/index.js  generated  vendored  Normal file
@@ -0,0 +1,28 @@
'use strict';

const isStream = stream =>
  stream !== null &&
  typeof stream === 'object' &&
  typeof stream.pipe === 'function';

isStream.writable = stream =>
  isStream(stream) &&
  stream.writable !== false &&
  typeof stream._write === 'function' &&
  typeof stream._writableState === 'object';

isStream.readable = stream =>
  isStream(stream) &&
  stream.readable !== false &&
  typeof stream._read === 'function' &&
  typeof stream._readableState === 'object';

isStream.duplex = stream =>
  isStream.writable(stream) &&
  isStream.readable(stream);

isStream.transform = stream =>
  isStream.duplex(stream) &&
  typeof stream._transform === 'function';

module.exports = isStream;
9  node_modules/archiver-utils/node_modules/is-stream/license  generated  vendored  Normal file
@@ -0,0 +1,9 @@
MIT License

Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
42  node_modules/archiver-utils/node_modules/is-stream/package.json  generated  vendored  Normal file
@@ -0,0 +1,42 @@
{
  "name": "is-stream",
  "version": "2.0.1",
  "description": "Check if something is a Node.js stream",
  "license": "MIT",
  "repository": "sindresorhus/is-stream",
  "funding": "https://github.com/sponsors/sindresorhus",
  "author": {
    "name": "Sindre Sorhus",
    "email": "sindresorhus@gmail.com",
    "url": "https://sindresorhus.com"
  },
  "engines": {
    "node": ">=8"
  },
  "scripts": {
    "test": "xo && ava && tsd"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "keywords": [
    "stream",
    "type",
    "streams",
    "writable",
    "readable",
    "duplex",
    "transform",
    "check",
    "detect",
    "is"
  ],
  "devDependencies": {
    "@types/node": "^11.13.6",
    "ava": "^1.4.1",
    "tempy": "^0.3.0",
    "tsd": "^0.7.2",
    "xo": "^0.24.0"
  }
}
60  node_modules/archiver-utils/node_modules/is-stream/readme.md  generated  vendored  Normal file
@@ -0,0 +1,60 @@
# is-stream

> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html)

## Install

```
$ npm install is-stream
```

## Usage

```js
const fs = require('fs');
const isStream = require('is-stream');

isStream(fs.createReadStream('unicorn.png'));
//=> true

isStream({});
//=> false
```

## API

### isStream(stream)

Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream).

#### isStream.writable(stream)

Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable).

#### isStream.readable(stream)

Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable).

#### isStream.duplex(stream)

Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex).

#### isStream.transform(stream)

Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform).

## Related

- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream

---

<div align="center">
	<b>
		<a href="https://tidelift.com/subscription/pkg/npm-is-stream?utm_source=npm-is-stream&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
	</b>
	<br>
	<sub>
		Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
	</sub>
</div>
48  node_modules/archiver-utils/package.json  generated  vendored  Normal file
@@ -0,0 +1,48 @@
{
  "name": "archiver-utils",
  "version": "5.0.2",
  "license": "MIT",
  "description": "utility functions for archiver",
  "homepage": "https://github.com/archiverjs/archiver-utils#readme",
  "author": {
    "name": "Chris Talkington",
    "url": "http://christalkington.com/"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/archiverjs/archiver-utils.git"
  },
  "bugs": {
    "url": "https://github.com/archiverjs/archiver-utils/issues"
  },
  "keywords": [
    "archiver",
    "utils"
  ],
  "main": "index.js",
  "files": [
    "index.js",
    "file.js"
  ],
  "engines": {
    "node": ">= 14"
  },
  "scripts": {
    "test": "mocha --reporter dot"
  },
  "dependencies": {
    "glob": "^10.0.0",
    "graceful-fs": "^4.2.0",
    "is-stream": "^2.0.1",
    "lazystream": "^1.0.0",
    "lodash": "^4.17.15",
    "normalize-path": "^3.0.0",
    "readable-stream": "^4.0.0"
  },
  "devDependencies": {
    "chai": "4.4.1",
    "mkdirp": "3.0.1",
    "mocha": "10.3.0",
    "rimraf": "5.0.5"
  }
}