Update checked-in dependencies
parent aab34601c1
commit d853bec339

3889 changed files with 146268 additions and 299 deletions

16 node_modules/@actions/cache/README.md generated vendored
@@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac

Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.

## ⚠️ Important changes

The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.

The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.

**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible, before **February 1st, 2025**.

If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.

Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.

Read more about the change and access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).

## Usage

This package is used by the v2+ versions of our first-party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).
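For orientation, a minimal sketch of calling the package directly; the paths and key names below are illustrative placeholders, not taken from this commit:

```ts
import * as cache from '@actions/cache'

async function run(): Promise<void> {
  // Placeholder paths and keys; real keys are usually derived from a lockfile hash.
  const paths = ['node_modules']
  const key = 'npm-linux-x64-deadbeef'
  const restoreKeys = ['npm-linux-x64-']

  // restoreCache returns the matched key on a hit, or undefined on a miss.
  const hit = await cache.restoreCache(paths, key, restoreKeys)
  if (hit === undefined) {
    // ...install dependencies here, then save the result for later runs.
    await cache.saveCache(paths, key)
  }
}
```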
@@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)

A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which causes the workflow job to hang and eventually fail. Version `v3.0.4` of the cache package introduces a segment download timeout, which allows a stuck segment download to be aborted so the job can proceed with a cache miss.

The default value of this timeout is 10 minutes (as of `v3.2.1`; previously 60 minutes in versions `v3.0.4` through `v3.2.0`, both inclusive) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with the timeout value in minutes.
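As a sketch, the variable only needs to be present in the process environment before the restore call runs; the value `5` here is illustrative:

```ts
// Abort a stuck segment download after 5 minutes instead of the default 10.
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '5'
const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
```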
4 node_modules/@actions/cache/lib/cache.d.ts generated vendored

@@ -15,8 +15,8 @@ export declare function isFeatureAvailable(): boolean;
 * Restores cache from keys
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined

229 node_modules/@actions/cache/lib/cache.js generated vendored
@@ -37,7 +37,10 @@ const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
const utils = __importStar(require("./internal/cacheUtils"));
const cacheHttpClient = __importStar(require("./internal/cacheHttpClient"));
const cacheTwirpClient = __importStar(require("./internal/shared/cacheTwirpClient"));
const config_1 = require("./internal/config");
const tar_1 = require("./internal/tar");
const constants_1 = require("./internal/constants");
class ValidationError extends Error {
    constructor(message) {
        super(message);
@@ -81,15 +84,39 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * Restores cache from keys
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
        core.debug(`Cache service version: ${cacheServiceVersion}`);
        checkPaths(paths);
        switch (cacheServiceVersion) {
            case 'v2':
                return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
            case 'v1':
            default:
                return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
        }
    });
}
exports.restoreCache = restoreCache;
/**
 * Restores cache using the legacy Cache Service
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
 * @param options cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
    return __awaiter(this, void 0, void 0, function* () {
        restoreKeys = restoreKeys || [];
        const keys = [primaryKey, ...restoreKeys];
        core.debug('Resolved Keys:');
@@ -151,7 +178,85 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
        return undefined;
    });
}
exports.restoreCache = restoreCache;
/**
 * Restores cache using Cache Service v2
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
    return __awaiter(this, void 0, void 0, function* () {
        // Override UploadOptions to force the use of Azure
        options = Object.assign(Object.assign({}, options), { useAzureSdk: true });
        restoreKeys = restoreKeys || [];
        const keys = [primaryKey, ...restoreKeys];
        core.debug('Resolved Keys:');
        core.debug(JSON.stringify(keys));
        if (keys.length > 10) {
            throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
        }
        for (const key of keys) {
            checkKey(key);
        }
        let archivePath = '';
        try {
            const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
            const compressionMethod = yield utils.getCompressionMethod();
            const request = {
                key: primaryKey,
                restoreKeys,
                version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
            };
            const response = yield twirpClient.GetCacheEntryDownloadURL(request);
            if (!response.ok) {
                core.warning(`Cache not found for keys: ${keys.join(', ')}`);
                return undefined;
            }
            core.info(`Cache hit for: ${request.key}`);
            if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
                core.info('Lookup only - skipping download');
                return response.matchedKey;
            }
            archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
            core.debug(`Archive path: ${archivePath}`);
            core.debug(`Starting download of archive to: ${archivePath}`);
            yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
            core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
            if (core.isDebug()) {
                yield (0, tar_1.listTar)(archivePath, compressionMethod);
            }
            yield (0, tar_1.extractTar)(archivePath, compressionMethod);
            core.info('Cache restored successfully');
            return response.matchedKey;
        }
        catch (error) {
            const typedError = error;
            if (typedError.name === ValidationError.name) {
                throw error;
            }
            else {
                // Suppress all non-validation cache related errors because caching should be optional
                core.warning(`Failed to restore: ${error.message}`);
            }
        }
        finally {
            try {
                if (archivePath) {
                    yield utils.unlinkFile(archivePath);
                }
            }
            catch (error) {
                core.debug(`Failed to delete archive: ${error}`);
            }
        }
        return undefined;
    });
}
/**
 * Saves a list of files with the specified key
 *
@@ -162,10 +267,33 @@ exports.restoreCache = restoreCache;
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
function saveCache(paths, key, options, enableCrossOsArchive = false) {
    var _a, _b, _c, _d, _e;
    return __awaiter(this, void 0, void 0, function* () {
        const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
        core.debug(`Cache service version: ${cacheServiceVersion}`);
        checkPaths(paths);
        checkKey(key);
        switch (cacheServiceVersion) {
            case 'v2':
                return yield saveCacheV2(paths, key, options, enableCrossOsArchive);
            case 'v1':
            default:
                return yield saveCacheV1(paths, key, options, enableCrossOsArchive);
        }
    });
}
exports.saveCache = saveCache;
/**
 * Save cache using the legacy Cache Service
 *
 * @param paths
 * @param key
 * @param options
 * @param enableCrossOsArchive
 * @returns
 */
function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
    var _a, _b, _c, _d, _e;
    return __awaiter(this, void 0, void 0, function* () {
        const compressionMethod = yield utils.getCompressionMethod();
        let cacheId = -1;
        const cachePaths = yield utils.resolvePaths(paths);
@@ -186,7 +314,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
            core.debug(`File Size: ${archiveFileSize}`);
            // For GHES, this check will take place in ReserveCache API with enterprise file size limit
            if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
            if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
            }
            core.debug('Reserving Cache');
@@ -205,7 +333,95 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
                throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
            }
            core.debug(`Saving Cache (ID: ${cacheId})`);
            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
            yield cacheHttpClient.saveCache(cacheId, archivePath, '', options);
        }
        catch (error) {
            const typedError = error;
            if (typedError.name === ValidationError.name) {
                throw error;
            }
            else if (typedError.name === ReserveCacheError.name) {
                core.info(`Failed to save: ${typedError.message}`);
            }
            else {
                core.warning(`Failed to save: ${typedError.message}`);
            }
        }
        finally {
            // Try to delete the archive to save space
            try {
                yield utils.unlinkFile(archivePath);
            }
            catch (error) {
                core.debug(`Failed to delete archive: ${error}`);
            }
        }
        return cacheId;
    });
}
/**
 * Save cache using Cache Service v2
 *
 * @param paths a list of file paths to restore from the cache
 * @param key an explicit key for restoring the cache
 * @param options cache upload options
 * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @returns
 */
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
    return __awaiter(this, void 0, void 0, function* () {
        // Override UploadOptions to force the use of Azure
        // ...options goes first because we want to override the default values
        // set in UploadOptions with these specific figures
        options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });
        const compressionMethod = yield utils.getCompressionMethod();
        const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
        let cacheId = -1;
        const cachePaths = yield utils.resolvePaths(paths);
        core.debug('Cache Paths:');
        core.debug(`${JSON.stringify(cachePaths)}`);
        if (cachePaths.length === 0) {
            throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
        }
        const archiveFolder = yield utils.createTempDirectory();
        const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
        core.debug(`Archive Path: ${archivePath}`);
        try {
            yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
            if (core.isDebug()) {
                yield (0, tar_1.listTar)(archivePath, compressionMethod);
            }
            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
            core.debug(`File Size: ${archiveFileSize}`);
            // For GHES, this check will take place in ReserveCache API with enterprise file size limit
            if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
            }
            // Set the archive size in the options, will be used to display the upload progress
            options.archiveSizeBytes = archiveFileSize;
            core.debug('Reserving Cache');
            const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
            const request = {
                key,
                version
            };
            const response = yield twirpClient.CreateCacheEntry(request);
            if (!response.ok) {
                throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
            }
            core.debug(`Attempting to upload cache located at: ${archivePath}`);
            yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options);
            const finalizeRequest = {
                key,
                version,
                sizeBytes: `${archiveFileSize}`
            };
            const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
            core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
            if (!finalizeResponse.ok) {
                throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
            }
            cacheId = parseInt(finalizeResponse.entryId);
        }
        catch (error) {
            const typedError = error;
@@ -231,5 +447,4 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
        return cacheId;
    });
}
exports.saveCache = saveCache;
//# sourceMappingURL=cache.js.map

2 node_modules/@actions/cache/lib/cache.js.map generated vendored
File diff suppressed because one or more lines are too long
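Taken together, the v2 save path above is a three-step protocol against the Twirp-based cache service. Stripped of error handling, it reduces to roughly the following sketch (names as in the code above; this is a simplified summary, not a drop-in snippet):

```ts
// 1. Reserve an entry for (key, version) and obtain a signed upload URL.
const reserve = await twirpClient.CreateCacheEntry({ key, version });

// 2. Upload the tar archive directly to blob storage via the signed URL.
await cacheHttpClient.saveCache(cacheId, archivePath, reserve.signedUploadUrl, options);

// 3. Seal the entry; the service returns the final numeric entry id.
const done = await twirpClient.FinalizeCacheEntryUpload({ key, version, sizeBytes: `${archiveFileSize}` });
cacheId = parseInt(done.entryId);
```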
158 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.d.ts generated vendored Normal file

@@ -0,0 +1,158 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
 * A Timestamp represents a point in time independent of any time zone or local
 * calendar, encoded as a count of seconds and fractions of seconds at
 * nanosecond resolution. The count is relative to an epoch at UTC midnight on
 * January 1, 1970, in the proleptic Gregorian calendar which extends the
 * Gregorian calendar backwards to year one.
 *
 * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
 * second table is needed for interpretation, using a [24-hour linear
 * smear](https://developers.google.com/time/smear).
 *
 * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
 * restricting to that range, we ensure that we can convert to and from [RFC
 * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
 *
 * # Examples
 *
 * Example 1: Compute Timestamp from POSIX `time()`.
 *
 *     Timestamp timestamp;
 *     timestamp.set_seconds(time(NULL));
 *     timestamp.set_nanos(0);
 *
 * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
 *
 *     struct timeval tv;
 *     gettimeofday(&tv, NULL);
 *
 *     Timestamp timestamp;
 *     timestamp.set_seconds(tv.tv_sec);
 *     timestamp.set_nanos(tv.tv_usec * 1000);
 *
 * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
 *
 *     FILETIME ft;
 *     GetSystemTimeAsFileTime(&ft);
 *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
 *
 *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
 *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
 *     Timestamp timestamp;
 *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
 *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
 *
 * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
 *
 *     long millis = System.currentTimeMillis();
 *
 *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
 *         .setNanos((int) ((millis % 1000) * 1000000)).build();
 *
 *
 * Example 5: Compute Timestamp from Java `Instant.now()`.
 *
 *     Instant now = Instant.now();
 *
 *     Timestamp timestamp =
 *         Timestamp.newBuilder().setSeconds(now.getEpochSecond())
 *             .setNanos(now.getNano()).build();
 *
 *
 * Example 6: Compute Timestamp from current time in Python.
 *
 *     timestamp = Timestamp()
 *     timestamp.GetCurrentTime()
 *
 * # JSON Mapping
 *
 * In JSON format, the Timestamp type is encoded as a string in the
 * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
 * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
 * where {year} is always expressed using four digits while {month}, {day},
 * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
 * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
 * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
 * is required. A proto3 JSON serializer should always use UTC (as indicated by
 * "Z") when printing the Timestamp type and a proto3 JSON parser should be
 * able to accept both UTC and other timezones (as indicated by an offset).
 *
 * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
 * 01:30 UTC on January 15, 2017.
 *
 * In JavaScript, one can convert a Date object to this format using the
 * standard
 * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
 * method. In Python, a standard `datetime.datetime` object can be converted
 * to this format using
 * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
 * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
 * the Joda Time's [`ISODateTimeFormat.dateTime()`](
 * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
 * ) to obtain a formatter capable of generating timestamps in this format.
 *
 *
 *
 * @generated from protobuf message google.protobuf.Timestamp
 */
export interface Timestamp {
    /**
     * Represents seconds of UTC time since Unix epoch
     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
     * 9999-12-31T23:59:59Z inclusive.
     *
     * @generated from protobuf field: int64 seconds = 1;
     */
    seconds: string;
    /**
     * Non-negative fractions of a second at nanosecond resolution. Negative
     * second values with fractions must still have non-negative nanos values
     * that count forward in time. Must be from 0 to 999,999,999
     * inclusive.
     *
     * @generated from protobuf field: int32 nanos = 2;
     */
    nanos: number;
}
declare class Timestamp$Type extends MessageType<Timestamp> {
    constructor();
    /**
     * Creates a new `Timestamp` for the current time.
     */
    now(): Timestamp;
    /**
     * Converts a `Timestamp` to a JavaScript Date.
     */
    toDate(message: Timestamp): Date;
    /**
     * Converts a JavaScript Date to a `Timestamp`.
     */
    fromDate(date: Date): Timestamp;
    /**
     * In JSON format, the `Timestamp` type is encoded as a string
     * in the RFC 3339 format.
     */
    internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
    /**
     * In JSON format, the `Timestamp` type is encoded as a string
     * in the RFC 3339 format.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
    create(value?: PartialMessage<Timestamp>): Timestamp;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
    internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.Timestamp
 */
export declare const Timestamp: Timestamp$Type;
export {};
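The exported `Timestamp` constant doubles as a small conversion helper. A brief sketch of the API declared above; the import path assumes this vendored layout and is otherwise an assumption:

```ts
import { Timestamp } from "@actions/cache/lib/generated/google/protobuf/timestamp";

const now = Timestamp.now();              // seconds is a string because int64 was generated with long_type_string
const asDate = Timestamp.toDate(now);     // back to a plain JavaScript Date
const roundTrip = Timestamp.fromDate(asDate);
```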
136 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js generated vendored Normal file

@@ -0,0 +1,136 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Timestamp = void 0;
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const runtime_6 = require("@protobuf-ts/runtime");
|
||||
const runtime_7 = require("@protobuf-ts/runtime");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Timestamp$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.Timestamp", [
|
||||
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Creates a new `Timestamp` for the current time.
|
||||
*/
|
||||
now() {
|
||||
const msg = this.create();
|
||||
const ms = Date.now();
|
||||
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* Converts a `Timestamp` to a JavaScript Date.
|
||||
*/
|
||||
toDate(message) {
|
||||
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
|
||||
}
|
||||
/**
|
||||
* Converts a JavaScript Date to a `Timestamp`.
|
||||
*/
|
||||
fromDate(date) {
|
||||
const msg = this.create();
|
||||
const ms = date.getTime();
|
||||
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (message.nanos < 0)
|
||||
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
|
||||
let z = "Z";
|
||||
if (message.nanos > 0) {
|
||||
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
|
||||
if (nanosStr.substring(3) === "000000")
|
||||
z = "." + nanosStr.substring(0, 3) + "Z";
|
||||
else if (nanosStr.substring(6) === "000")
|
||||
z = "." + nanosStr.substring(0, 6) + "Z";
|
||||
else
|
||||
z = "." + nanosStr + "Z";
|
||||
}
|
||||
return new Date(ms).toISOString().replace(".000Z", z);
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (typeof json !== "string")
|
||||
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
|
||||
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
|
||||
if (!matches)
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
|
||||
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
|
||||
if (Number.isNaN(ms))
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
|
||||
target.nanos = 0;
|
||||
if (matches[7])
|
||||
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { seconds: "0", nanos: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 seconds */ 1:
|
||||
message.seconds = reader.int64().toString();
|
||||
break;
|
||||
case /* int32 nanos */ 2:
|
||||
message.nanos = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int64 seconds = 1; */
|
||||
if (message.seconds !== "0")
|
||||
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
|
||||
/* int32 nanos = 2; */
|
||||
if (message.nanos !== 0)
|
||||
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
exports.Timestamp = new Timestamp$Type();
|
||||
//# sourceMappingURL=timestamp.js.map
|
||||
1 node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long

307 node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.d.ts generated vendored Normal file

@@ -0,0 +1,307 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
 * Wrapper message for `double`.
 *
 * The JSON representation for `DoubleValue` is JSON number.
 *
 * @generated from protobuf message google.protobuf.DoubleValue
 */
export interface DoubleValue {
    /**
     * The double value.
     *
     * @generated from protobuf field: double value = 1;
     */
    value: number;
}
/**
 * Wrapper message for `float`.
 *
 * The JSON representation for `FloatValue` is JSON number.
 *
 * @generated from protobuf message google.protobuf.FloatValue
 */
export interface FloatValue {
    /**
     * The float value.
     *
     * @generated from protobuf field: float value = 1;
     */
    value: number;
}
/**
 * Wrapper message for `int64`.
 *
 * The JSON representation for `Int64Value` is JSON string.
 *
 * @generated from protobuf message google.protobuf.Int64Value
 */
export interface Int64Value {
    /**
     * The int64 value.
     *
     * @generated from protobuf field: int64 value = 1;
     */
    value: string;
}
/**
 * Wrapper message for `uint64`.
 *
 * The JSON representation for `UInt64Value` is JSON string.
 *
 * @generated from protobuf message google.protobuf.UInt64Value
 */
export interface UInt64Value {
    /**
     * The uint64 value.
     *
     * @generated from protobuf field: uint64 value = 1;
     */
    value: string;
}
/**
 * Wrapper message for `int32`.
 *
 * The JSON representation for `Int32Value` is JSON number.
 *
 * @generated from protobuf message google.protobuf.Int32Value
 */
export interface Int32Value {
    /**
     * The int32 value.
     *
     * @generated from protobuf field: int32 value = 1;
     */
    value: number;
}
/**
 * Wrapper message for `uint32`.
 *
 * The JSON representation for `UInt32Value` is JSON number.
 *
 * @generated from protobuf message google.protobuf.UInt32Value
 */
export interface UInt32Value {
    /**
     * The uint32 value.
     *
     * @generated from protobuf field: uint32 value = 1;
     */
    value: number;
}
/**
 * Wrapper message for `bool`.
 *
 * The JSON representation for `BoolValue` is JSON `true` and `false`.
 *
 * @generated from protobuf message google.protobuf.BoolValue
 */
export interface BoolValue {
    /**
     * The bool value.
     *
     * @generated from protobuf field: bool value = 1;
     */
    value: boolean;
}
/**
 * Wrapper message for `string`.
 *
 * The JSON representation for `StringValue` is JSON string.
 *
 * @generated from protobuf message google.protobuf.StringValue
 */
export interface StringValue {
    /**
     * The string value.
     *
     * @generated from protobuf field: string value = 1;
     */
    value: string;
}
/**
 * Wrapper message for `bytes`.
 *
 * The JSON representation for `BytesValue` is JSON string.
 *
 * @generated from protobuf message google.protobuf.BytesValue
 */
export interface BytesValue {
    /**
     * The bytes value.
     *
     * @generated from protobuf field: bytes value = 1;
     */
    value: Uint8Array;
}
declare class DoubleValue$Type extends MessageType<DoubleValue> {
    constructor();
    /**
     * Encode `DoubleValue` to JSON number.
     */
    internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `DoubleValue` from JSON number.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
    create(value?: PartialMessage<DoubleValue>): DoubleValue;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
    internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.DoubleValue
 */
export declare const DoubleValue: DoubleValue$Type;
declare class FloatValue$Type extends MessageType<FloatValue> {
    constructor();
    /**
     * Encode `FloatValue` to JSON number.
     */
    internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `FloatValue` from JSON number.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
    create(value?: PartialMessage<FloatValue>): FloatValue;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
    internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.FloatValue
 */
export declare const FloatValue: FloatValue$Type;
declare class Int64Value$Type extends MessageType<Int64Value> {
    constructor();
    /**
     * Encode `Int64Value` to JSON string.
     */
    internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `Int64Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
    create(value?: PartialMessage<Int64Value>): Int64Value;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
    internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int64Value
 */
export declare const Int64Value: Int64Value$Type;
declare class UInt64Value$Type extends MessageType<UInt64Value> {
    constructor();
    /**
     * Encode `UInt64Value` to JSON string.
     */
    internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `UInt64Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
    create(value?: PartialMessage<UInt64Value>): UInt64Value;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
    internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt64Value
 */
export declare const UInt64Value: UInt64Value$Type;
declare class Int32Value$Type extends MessageType<Int32Value> {
    constructor();
    /**
     * Encode `Int32Value` to JSON string.
     */
    internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `Int32Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
    create(value?: PartialMessage<Int32Value>): Int32Value;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
    internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int32Value
 */
export declare const Int32Value: Int32Value$Type;
declare class UInt32Value$Type extends MessageType<UInt32Value> {
    constructor();
    /**
     * Encode `UInt32Value` to JSON string.
     */
    internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `UInt32Value` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
    create(value?: PartialMessage<UInt32Value>): UInt32Value;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
    internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt32Value
 */
export declare const UInt32Value: UInt32Value$Type;
declare class BoolValue$Type extends MessageType<BoolValue> {
    constructor();
    /**
     * Encode `BoolValue` to JSON bool.
     */
    internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `BoolValue` from JSON bool.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
    create(value?: PartialMessage<BoolValue>): BoolValue;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
    internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.BoolValue
 */
export declare const BoolValue: BoolValue$Type;
declare class StringValue$Type extends MessageType<StringValue> {
    constructor();
    /**
     * Encode `StringValue` to JSON string.
     */
    internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `StringValue` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
    create(value?: PartialMessage<StringValue>): StringValue;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
    internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.StringValue
 */
export declare const StringValue: StringValue$Type;
declare class BytesValue$Type extends MessageType<BytesValue> {
    constructor();
    /**
     * Encode `BytesValue` to JSON string.
     */
    internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
    /**
     * Decode `BytesValue` from JSON string.
     */
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
    create(value?: PartialMessage<BytesValue>): BytesValue;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
    internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message google.protobuf.BytesValue
 */
export declare const BytesValue: BytesValue$Type;
export {};
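One detail worth noting from these declarations: the 64-bit wrappers carry their value as a string (this code was generated with `long_type_string`, per the header of the matching .js file below), so callers construct them with string literals. A small sketch; the import path assumes this vendored layout:

```ts
import { Int64Value, BoolValue } from "@actions/cache/lib/generated/google/protobuf/wrappers";

// Exceeds Number.MAX_SAFE_INTEGER, hence the string representation.
const big = Int64Value.create({ value: "9007199254740993" });
// 32-bit and bool wrappers use native JS types.
const flag = BoolValue.create({ value: true });
```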
614 node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.js generated vendored Normal file

@@ -0,0 +1,614 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
|
||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
// These wrappers have no meaningful use within repeated fields as they lack
|
||||
// the ability to detect presence on individual elements.
|
||||
// These wrappers have no meaningful use within a map or a oneof since
|
||||
// individual entries of a map or fields of a oneof can already detect presence.
|
||||
//
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const runtime_6 = require("@protobuf-ts/runtime");
|
||||
const runtime_7 = require("@protobuf-ts/runtime");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DoubleValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.DoubleValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `DoubleValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `DoubleValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* double value */ 1:
|
||||
message.value = reader.double();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* double value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
exports.DoubleValue = new DoubleValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FloatValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.FloatValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `FloatValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `FloatValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* float value */ 1:
|
||||
message.value = reader.float();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* float value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
 * @generated MessageType for protobuf message google.protobuf.FloatValue
 */
exports.FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.Int64Value", [
            { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    /**
     * Encode `Int64Value` to JSON string.
     */
    internalJsonWrite(message, options) {
        return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
    }
    /**
     * Decode `Int64Value` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
        return target;
    }
    create(value) {
        const message = { value: "0" };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 value */ 1:
                    message.value = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* int64 value = 1; */
        if (message.value !== "0")
            writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int64Value
 */
exports.Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.UInt64Value", [
            { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
        ]);
    }
    /**
     * Encode `UInt64Value` to JSON string.
     */
    internalJsonWrite(message, options) {
        return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
    }
    /**
     * Decode `UInt64Value` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
        return target;
    }
    create(value) {
        const message = { value: "0" };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* uint64 value */ 1:
                    message.value = reader.uint64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* uint64 value = 1; */
        if (message.value !== "0")
            writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt64Value
 */
exports.UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.Int32Value", [
            { no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    /**
     * Encode `Int32Value` to JSON string.
     */
    internalJsonWrite(message, options) {
        return this.refJsonWriter.scalar(5, message.value, "value", false, true);
    }
    /**
     * Decode `Int32Value` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
        return target;
    }
    create(value) {
        const message = { value: 0 };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int32 value */ 1:
                    message.value = reader.int32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* int32 value = 1; */
        if (message.value !== 0)
            writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.Int32Value
 */
exports.Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.UInt32Value", [
            { no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
        ]);
    }
    /**
     * Encode `UInt32Value` to JSON string.
     */
    internalJsonWrite(message, options) {
        return this.refJsonWriter.scalar(13, message.value, "value", false, true);
    }
    /**
     * Decode `UInt32Value` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
        return target;
    }
    create(value) {
        const message = { value: 0 };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* uint32 value */ 1:
                    message.value = reader.uint32();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* uint32 value = 1; */
        if (message.value !== 0)
            writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.UInt32Value
 */
exports.UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.BoolValue", [
            { no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    /**
     * Encode `BoolValue` to JSON bool.
     */
    internalJsonWrite(message, options) {
        return message.value;
    }
    /**
     * Decode `BoolValue` from JSON bool.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
        return target;
    }
    create(value) {
        const message = { value: false };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool value */ 1:
                    message.value = reader.bool();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool value = 1; */
        if (message.value !== false)
            writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.BoolValue
 */
exports.BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.StringValue", [
            { no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    /**
     * Encode `StringValue` to JSON string.
     */
    internalJsonWrite(message, options) {
        return message.value;
    }
    /**
     * Decode `StringValue` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
        return target;
    }
    create(value) {
        const message = { value: "" };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string value */ 1:
                    message.value = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* string value = 1; */
        if (message.value !== "")
            writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.StringValue
 */
exports.StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends runtime_7.MessageType {
    constructor() {
        super("google.protobuf.BytesValue", [
            { no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    /**
     * Encode `BytesValue` to JSON string.
     */
    internalJsonWrite(message, options) {
        return this.refJsonWriter.scalar(12, message.value, "value", false, true);
    }
    /**
     * Decode `BytesValue` from JSON string.
     */
    internalJsonRead(json, options, target) {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
        return target;
    }
    create(value) {
        const message = { value: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_5.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes value */ 1:
                    message.value = reader.bytes();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bytes value = 1; */
        if (message.value.length)
            writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message google.protobuf.BytesValue
 */
exports.BytesValue = new BytesValue$Type();
//# sourceMappingURL=wrappers.js.map
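The wrapper types above all share one shape: a single `value` field numbered 1, written to the wire only when it differs from its proto3 default. Because this package is generated with `long_type_string`, the 64-bit wrappers surface `value` as a decimal string rather than a bigint. A minimal round-trip sketch using the standard protobuf-ts MessageType API; the deep import path and the numeric value are illustrative, not part of the generated file:

import { Int64Value } from "@actions/cache/lib/generated/google/protobuf/wrappers";

// long_type_string: int64 values travel as decimal strings in JS/TS.
const msg = Int64Value.create({ value: "9007199254740993" });
const bytes = Int64Value.toBinary(msg);   // varint-encodes field 1 via internalBinaryWrite
const back = Int64Value.fromBinary(bytes); // decodes via internalBinaryRead
console.log(back.value);                   // "9007199254740993"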
1
node_modules/@actions/cache/lib/generated/google/protobuf/wrappers.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
378
node_modules/@actions/cache/lib/generated/results/api/v1/cache.d.ts
generated
vendored
Normal file
@ -0,0 +1,378 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheEntry } from "../../entities/v1/cacheentry";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
 */
export interface CreateCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 3;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
export interface CreateCacheEntryResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * SAS URL to upload the cache archive
     *
     * @generated from protobuf field: string signed_upload_url = 2;
     */
    signedUploadUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
export interface FinalizeCacheEntryUploadRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Size of the cache archive in Bytes
     *
     * @generated from protobuf field: int64 size_bytes = 3;
     */
    sizeBytes: string;
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
export interface FinalizeCacheEntryUploadResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * Cache entry database ID
     *
     * @generated from protobuf field: int64 entry_id = 2;
     */
    entryId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
export interface GetCacheEntryDownloadURLRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
export interface GetCacheEntryDownloadURLResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * SAS URL to download the cache archive
     *
     * @generated from protobuf field: string signed_download_url = 2;
     */
    signedDownloadUrl: string;
    /**
     * Key or restore key that matches the lookup
     *
     * @generated from protobuf field: string matched_key = 3;
     */
    matchedKey: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
export interface DeleteCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
export interface DeleteCacheEntryResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * Cache entry database ID
     *
     * @generated from protobuf field: int64 entry_id = 2;
     */
    entryId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
 */
export interface ListCacheEntriesRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
 */
export interface ListCacheEntriesResponse {
    /**
     * Cache entries in the defined scope
     *
     * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
     */
    entries: CacheEntry[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
 */
export interface LookupCacheEntryRequest {
    /**
     * Scope and other metadata for the cache entry
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
     */
    metadata?: CacheMetadata;
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 2;
     */
    key: string;
    /**
     * Restore keys used for prefix searching
     *
     * @generated from protobuf field: repeated string restore_keys = 3;
     */
    restoreKeys: string[];
    /**
     * Hash of the compression tool, runner OS and paths cached
     *
     * @generated from protobuf field: string version = 4;
     */
    version: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
 */
export interface LookupCacheEntryResponse {
    /**
     * Indicates whether the cache entry exists or not
     *
     * @generated from protobuf field: bool exists = 1;
     */
    exists: boolean;
    /**
     * Matched cache entry metadata
     *
     * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
     */
    entry?: CacheEntry;
}
declare class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest;
    internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
 */
export declare const CreateCacheEntryRequest: CreateCacheEntryRequest$Type;
declare class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse;
    internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
export declare const CreateCacheEntryResponse: CreateCacheEntryResponse$Type;
declare class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
    constructor();
    create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest;
    internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
export declare const FinalizeCacheEntryUploadRequest: FinalizeCacheEntryUploadRequest$Type;
declare class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
    constructor();
    create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse;
    internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
export declare const FinalizeCacheEntryUploadResponse: FinalizeCacheEntryUploadResponse$Type;
declare class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
    constructor();
    create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest;
    internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
export declare const GetCacheEntryDownloadURLRequest: GetCacheEntryDownloadURLRequest$Type;
declare class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
    constructor();
    create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse;
    internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
export declare const GetCacheEntryDownloadURLResponse: GetCacheEntryDownloadURLResponse$Type;
declare class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest;
    internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
export declare const DeleteCacheEntryRequest: DeleteCacheEntryRequest$Type;
declare class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse;
    internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
export declare const DeleteCacheEntryResponse: DeleteCacheEntryResponse$Type;
declare class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
    constructor();
    create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest;
    internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
 */
export declare const ListCacheEntriesRequest: ListCacheEntriesRequest$Type;
declare class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
    constructor();
    create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse;
    internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
 */
export declare const ListCacheEntriesResponse: ListCacheEntriesResponse$Type;
declare class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
    constructor();
    create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest;
    internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
 */
export declare const LookupCacheEntryRequest: LookupCacheEntryRequest$Type;
declare class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
    constructor();
    create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse;
    internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
 */
export declare const LookupCacheEntryResponse: LookupCacheEntryResponse$Type;
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
 */
export declare const CacheService: ServiceType;
export {};
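Taken together, these declarations describe the request/response pairs of the new cache service (v2) API. One detail worth noting: fields declared `int64` in the proto (`size_bytes`, `entry_id`) surface as `string` on the TypeScript side because the code was generated with the `long_type_string` parameter. A hedged sketch of constructing the upload-side messages; the relative import assumes a file next to the generated module, and the key, version hash, and size are made-up placeholder values:

import { CreateCacheEntryRequest, FinalizeCacheEntryUploadRequest } from "./cache";

// Hypothetical values; in the real client they come from the cache key,
// the hash of compression tool + runner OS + paths, and the archive size.
const createReq = CreateCacheEntryRequest.create({
    key: "node-cache-linux-x64",
    version: "b2f5ff47436671b6e533d8dc3614845d"
});
const finalizeReq = FinalizeCacheEntryUploadRequest.create({
    key: createReq.key,
    version: createReq.version,
    sizeBytes: "1048576" // int64 surfaces as a decimal string
});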
730
node_modules/@actions/cache/lib/generated/results/api/v1/cache.js
generated
vendored
Normal file
@ -0,0 +1,730 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
|
||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const cacheentry_1 = require("../../entities/v1/cacheentry");
|
||||
const cachemetadata_1 = require("../../entities/v1/cachemetadata");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { key: "", version: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* string version */ 3:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||
/* string version = 3; */
|
||||
if (message.version !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
|
||||
*/
|
||||
exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
|
||||
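Every message type in this file repeats the same decode skeleton: read a tag, dispatch on the field number, and route anything unrecognized through `options.readUnknownField`. A hedged sketch of exercising that path through the public MessageType API; the option value is the standard protobuf-ts `BinaryReadOptions` setting, and the message contents are illustrative:

import { CreateCacheEntryRequest } from "./cache";

const bytes = CreateCacheEntryRequest.toBinary(
    CreateCacheEntryRequest.create({ key: "k", version: "v" })
);
// The default keeps unknown fields; "throw" turns them into hard errors,
// which is what the `u === "throw"` branch above implements.
const decoded = CreateCacheEntryRequest.fromBinary(bytes, { readUnknownField: "throw" });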
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.CreateCacheEntryResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, signedUploadUrl: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* string signed_upload_url */ 2:
                    message.signedUploadUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* string signed_upload_url = 2; */
        if (message.signedUploadUrl !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
 */
exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", sizeBytes: "0", version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* int64 size_bytes */ 3:
                    message.sizeBytes = reader.int64().toString();
                    break;
                case /* string version */ 4:
                    message.version = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* int64 size_bytes = 3; */
        if (message.sizeBytes !== "0")
            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
        /* string version = 4; */
        if (message.version !== "")
            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
 */
exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, entryId: "0" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 entry_id */ 2:
                    message.entryId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* int64 entry_id = 2; */
        if (message.entryId !== "0")
            writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
 */
exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", restoreKeys: [], version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* repeated string restore_keys */ 3:
                    message.restoreKeys.push(reader.string());
                    break;
                case /* string version */ 4:
                    message.version = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* repeated string restore_keys = 3; */
        for (let i = 0; i < message.restoreKeys.length; i++)
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
        /* string version = 4; */
        if (message.version !== "")
            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
 */
exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* string signed_download_url */ 2:
                    message.signedDownloadUrl = reader.string();
                    break;
                case /* string matched_key */ 3:
                    message.matchedKey = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* string signed_download_url = 2; */
        if (message.signedDownloadUrl !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
        /* string matched_key = 3; */
        if (message.matchedKey !== "")
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
 */
exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
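The request/response pair above is what backs cache restores: the client sends the primary key plus an ordered list of restore keys, and on a hit the response carries both the signed URL and which key actually matched. A hedged sketch of that shape; the values are placeholders, and a real lookup would also populate metadata:

import { GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse } from "./cache";

const req = GetCacheEntryDownloadURLRequest.create({
    key: "node-cache-linux-x64-abc123",
    restoreKeys: ["node-cache-linux-x64-", "node-cache-"], // prefix fallbacks, most specific first
    version: "b2f5ff47436671b6e533d8dc3614845d"
});

// `ok` gates the download; `matchedKey` reports whether the primary key
// or one of the restore keys was hit.
function isExactHit(request: GetCacheEntryDownloadURLRequest, res: GetCacheEntryDownloadURLResponse): boolean {
    return res.ok && res.matchedKey === request.key;
}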
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
 */
exports.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value) {
        const message = { ok: false, entryId: "0" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 entry_id */ 2:
                    message.entryId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
        /* int64 entry_id = 2; */
        if (message.entryId !== "0")
            writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
 */
exports.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesRequest$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.ListCacheEntriesRequest", [
            { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
            { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value) {
        const message = { key: "", restoreKeys: [] };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
                    message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
                    break;
                case /* string key */ 2:
                    message.key = reader.string();
                    break;
                case /* repeated string restore_keys */ 3:
                    message.restoreKeys.push(reader.string());
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
        if (message.metadata)
            cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* string key = 2; */
        if (message.key !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
        /* repeated string restore_keys = 3; */
        for (let i = 0; i < message.restoreKeys.length; i++)
            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
 */
exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.api.v1.ListCacheEntriesResponse", [
            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
        ]);
    }
    create(value) {
        const message = { entries: [] };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
                    message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
        for (let i = 0; i < message.entries.length; i++)
            cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
|
||||
*/
|
||||
exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { key: "", restoreKeys: [], version: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* repeated string restore_keys */ 3:
|
||||
message.restoreKeys.push(reader.string());
|
||||
break;
|
||||
case /* string version */ 4:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
|
||||
/* repeated string restore_keys = 3; */
|
||||
for (let i = 0; i < message.restoreKeys.length; i++)
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
|
||||
/* string version = 4; */
|
||||
if (message.version !== "")
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
|
||||
*/
|
||||
exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.LookupCacheEntryResponse", [
|
||||
{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { exists: false };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool exists */ 1:
|
||||
message.exists = reader.bool();
|
||||
break;
|
||||
case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
|
||||
message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool exists = 1; */
|
||||
if (message.exists !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
|
||||
/* github.actions.results.entities.v1.CacheEntry entry = 2; */
|
||||
if (message.entry)
|
||||
cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
|
||||
*/
|
||||
exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
|
||||
*/
|
||||
exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [
|
||||
{ name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse },
|
||||
{ name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse },
|
||||
{ name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse },
|
||||
{ name: "DeleteCacheEntry", options: {}, I: exports.DeleteCacheEntryRequest, O: exports.DeleteCacheEntryResponse },
|
||||
{ name: "ListCacheEntries", options: {}, I: exports.ListCacheEntriesRequest, O: exports.ListCacheEntriesResponse },
|
||||
{ name: "LookupCacheEntry", options: {}, I: exports.LookupCacheEntryRequest, O: exports.LookupCacheEntryResponse }
|
||||
]);
|
||||
//# sourceMappingURL=cache.js.map
|
||||
1
node_modules/@actions/cache/lib/generated/results/api/v1/cache.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
53
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.d.ts
generated
vendored
Normal file
@@ -0,0 +1,53 @@
/// <reference types="node" />
import { TwirpContext, TwirpServer } from "twirp-ts";
import { CreateCacheEntryRequest, CreateCacheEntryResponse, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse, DeleteCacheEntryRequest, DeleteCacheEntryResponse, ListCacheEntriesRequest, ListCacheEntriesResponse, LookupCacheEntryRequest, LookupCacheEntryResponse } from "./cache";
interface Rpc {
    request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface CacheServiceClient {
    CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
    FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
    GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
    DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
    ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
    LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientJSON implements CacheServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
    FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
    GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
    DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
    ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
    LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientProtobuf implements CacheServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
    FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
    GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
    DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
    ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
    LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
    CreateCacheEntry(ctx: T, request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
    FinalizeCacheEntryUpload(ctx: T, request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
    GetCacheEntryDownloadURL(ctx: T, request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
    DeleteCacheEntry(ctx: T, request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
    ListCacheEntries(ctx: T, request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
    LookupCacheEntry(ctx: T, request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare enum CacheServiceMethod {
    CreateCacheEntry = "CreateCacheEntry",
    FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
    GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
    DeleteCacheEntry = "DeleteCacheEntry",
    ListCacheEntries = "ListCacheEntries",
    LookupCacheEntry = "LookupCacheEntry"
}
export declare const CacheServiceMethodList: CacheServiceMethod[];
export declare function createCacheServiceServer<T extends TwirpContext = TwirpContext>(service: CacheServiceTwirp<T>): TwirpServer<CacheServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
export {};
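The generated clients above are transport-agnostic: both depend only on the small `Rpc` interface, so any HTTP stack can drive them. A minimal sketch of a JSON transport follows; `FetchRpc`, the base URL, and the bearer token are invented for illustration and are not part of this package, which wires up its own transport internally.

// Sketch only: a fetch-based Rpc implementation; endpoint and token are placeholders.
import { CacheServiceClientJSON } from "./cache.twirp";

class FetchRpc {
    constructor(private readonly baseUrl: string, private readonly token: string) {}

    async request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array> {
        // Twirp routes every call as POST <prefix>/<package.Service>/<Method>.
        const res = await fetch(`${this.baseUrl}/twirp/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType, "Authorization": `Bearer ${this.token}` },
            body: contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array),
        });
        if (!res.ok) throw new Error(`Twirp call failed: ${res.status}`);
        return contentType === "application/json"
            ? ((await res.json()) as object)
            : new Uint8Array(await res.arrayBuffer());
    }
}

const client = new CacheServiceClientJSON(new FetchRpc("https://cache.example.invalid", "<token>"));
// client.LookupCacheEntry({ key: "npm-deps", restoreKeys: [], version: "v1" }) resolves to a LookupCacheEntryResponse.

CacheServiceClientProtobuf pairs with the same interface, exchanging Uint8Array payloads as application/protobuf instead.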
602
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js
generated
vendored
Normal file
@@ -0,0 +1,602 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createCacheServiceServer = exports.CacheServiceMethodList = exports.CacheServiceMethod = exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0;
const twirp_ts_1 = require("twirp-ts");
const cache_1 = require("./cache");
class CacheServiceClientJSON {
    constructor(rpc) {
        this.rpc = rpc;
        this.CreateCacheEntry.bind(this);
        this.FinalizeCacheEntryUpload.bind(this);
        this.GetCacheEntryDownloadURL.bind(this);
        this.DeleteCacheEntry.bind(this);
        this.ListCacheEntries.bind(this);
        this.LookupCacheEntry.bind(this);
    }
    CreateCacheEntry(request) {
        const data = cache_1.CreateCacheEntryRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data);
        return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
    FinalizeCacheEntryUpload(request) {
        const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data);
        return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
    GetCacheEntryDownloadURL(request) {
        const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data);
        return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
    DeleteCacheEntry(request) {
        const data = cache_1.DeleteCacheEntryRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data);
        return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
    ListCacheEntries(request) {
        const data = cache_1.ListCacheEntriesRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data);
        return promise.then((data) => cache_1.ListCacheEntriesResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
    LookupCacheEntry(request) {
        const data = cache_1.LookupCacheEntryRequest.toJson(request, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        });
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data);
        return promise.then((data) => cache_1.LookupCacheEntryResponse.fromJson(data, {
            ignoreUnknownFields: true,
        }));
    }
}
exports.CacheServiceClientJSON = CacheServiceClientJSON;
class CacheServiceClientProtobuf {
    constructor(rpc) {
        this.rpc = rpc;
        this.CreateCacheEntry.bind(this);
        this.FinalizeCacheEntryUpload.bind(this);
        this.GetCacheEntryDownloadURL.bind(this);
        this.DeleteCacheEntry.bind(this);
        this.ListCacheEntries.bind(this);
        this.LookupCacheEntry.bind(this);
    }
    CreateCacheEntry(request) {
        const data = cache_1.CreateCacheEntryRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data);
        return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data));
    }
    FinalizeCacheEntryUpload(request) {
        const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data);
        return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data));
    }
    GetCacheEntryDownloadURL(request) {
        const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data);
        return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data));
    }
    DeleteCacheEntry(request) {
        const data = cache_1.DeleteCacheEntryRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data);
        return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromBinary(data));
    }
    ListCacheEntries(request) {
        const data = cache_1.ListCacheEntriesRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data);
        return promise.then((data) => cache_1.ListCacheEntriesResponse.fromBinary(data));
    }
    LookupCacheEntry(request) {
        const data = cache_1.LookupCacheEntryRequest.toBinary(request);
        const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data);
        return promise.then((data) => cache_1.LookupCacheEntryResponse.fromBinary(data));
    }
}
exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf;
var CacheServiceMethod;
(function (CacheServiceMethod) {
    CacheServiceMethod["CreateCacheEntry"] = "CreateCacheEntry";
    CacheServiceMethod["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload";
    CacheServiceMethod["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL";
    CacheServiceMethod["DeleteCacheEntry"] = "DeleteCacheEntry";
    CacheServiceMethod["ListCacheEntries"] = "ListCacheEntries";
    CacheServiceMethod["LookupCacheEntry"] = "LookupCacheEntry";
})(CacheServiceMethod || (exports.CacheServiceMethod = CacheServiceMethod = {}));
exports.CacheServiceMethodList = [
    CacheServiceMethod.CreateCacheEntry,
    CacheServiceMethod.FinalizeCacheEntryUpload,
    CacheServiceMethod.GetCacheEntryDownloadURL,
    CacheServiceMethod.DeleteCacheEntry,
    CacheServiceMethod.ListCacheEntries,
    CacheServiceMethod.LookupCacheEntry,
];
function createCacheServiceServer(service) {
    return new twirp_ts_1.TwirpServer({
        service,
        packageName: "github.actions.results.api.v1",
        serviceName: "CacheService",
        methodList: exports.CacheServiceMethodList,
        matchRoute: matchCacheServiceRoute,
    });
}
exports.createCacheServiceServer = createCacheServiceServer;
function matchCacheServiceRoute(method, events) {
    switch (method) {
        case "CreateCacheEntry":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" });
                yield events.onMatch(ctx);
                return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors);
            });
        case "FinalizeCacheEntryUpload":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" });
                yield events.onMatch(ctx);
                return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors);
            });
        case "GetCacheEntryDownloadURL":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" });
                yield events.onMatch(ctx);
                return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors);
            });
        case "DeleteCacheEntry":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" });
                yield events.onMatch(ctx);
                return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors);
            });
        case "ListCacheEntries":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" });
                yield events.onMatch(ctx);
                return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors);
            });
        case "LookupCacheEntry":
            return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
                ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" });
                yield events.onMatch(ctx);
                return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors);
            });
        default:
            events.onNotFound();
            const msg = `no handler found`;
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) {
    switch (ctx.contentType) {
        case twirp_ts_1.TwirpContentType.JSON:
            return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors);
        case twirp_ts_1.TwirpContentType.Protobuf:
            return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors);
        default:
            const msg = "unexpected Content-Type";
            throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
    }
}
function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.CreateCacheEntryRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.CreateCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.CreateCacheEntry(ctx, request);
        }
        return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.FinalizeCacheEntryUpload(ctx, inputReq);
            });
        }
        else {
            response = yield service.FinalizeCacheEntryUpload(ctx, request);
        }
        return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.GetCacheEntryDownloadURL(ctx, inputReq);
            });
        }
        else {
            response = yield service.GetCacheEntryDownloadURL(ctx, request);
        }
        return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.DeleteCacheEntryRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.DeleteCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.DeleteCacheEntry(ctx, request);
        }
        return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.ListCacheEntriesRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.ListCacheEntries(ctx, inputReq);
            });
        }
        else {
            response = yield service.ListCacheEntries(ctx, request);
        }
        return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            const body = JSON.parse(data.toString() || "{}");
            request = cache_1.LookupCacheEntryRequest.fromJson(body, {
                ignoreUnknownFields: true,
            });
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the json request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.LookupCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.LookupCacheEntry(ctx, request);
        }
        return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, {
            useProtoFieldName: true,
            emitDefaultValues: false,
        }));
    });
}
function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.CreateCacheEntryRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.CreateCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.CreateCacheEntry(ctx, request);
        }
        return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response));
    });
}
function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.FinalizeCacheEntryUpload(ctx, inputReq);
            });
        }
        else {
            response = yield service.FinalizeCacheEntryUpload(ctx, request);
        }
        return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response));
    });
}
function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.GetCacheEntryDownloadURL(ctx, inputReq);
            });
        }
        else {
            response = yield service.GetCacheEntryDownloadURL(ctx, request);
        }
        return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response));
    });
}
function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.DeleteCacheEntryRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.DeleteCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.DeleteCacheEntry(ctx, request);
        }
        return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response));
    });
}
function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.ListCacheEntriesRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.ListCacheEntries(ctx, inputReq);
            });
        }
        else {
            response = yield service.ListCacheEntries(ctx, request);
        }
        return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response));
    });
}
function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) {
    return __awaiter(this, void 0, void 0, function* () {
        let request;
        let response;
        try {
            request = cache_1.LookupCacheEntryRequest.fromBinary(data);
        }
        catch (e) {
            if (e instanceof Error) {
                const msg = "the protobuf request could not be decoded";
                throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
            }
        }
        if (interceptors && interceptors.length > 0) {
            const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
            response = yield interceptor(ctx, request, (ctx, inputReq) => {
                return service.LookupCacheEntry(ctx, inputReq);
            });
        }
        else {
            response = yield service.LookupCacheEntry(ctx, request);
        }
        return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response));
    });
}
//# sourceMappingURL=cache.twirp.js.map
1
node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
71
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.d.ts
generated
vendored
Normal file
@@ -0,0 +1,71 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
 * @generated from protobuf message github.actions.results.entities.v1.CacheEntry
 */
export interface CacheEntry {
    /**
     * An explicit key for a cache entry
     *
     * @generated from protobuf field: string key = 1;
     */
    key: string;
    /**
     * SHA256 hex digest of the cache archive
     *
     * @generated from protobuf field: string hash = 2;
     */
    hash: string;
    /**
     * Cache entry size in bytes
     *
     * @generated from protobuf field: int64 size_bytes = 3;
     */
    sizeBytes: string;
    /**
     * Access scope
     *
     * @generated from protobuf field: string scope = 4;
     */
    scope: string;
    /**
     * Version SHA256 hex digest
     *
     * @generated from protobuf field: string version = 5;
     */
    version: string;
    /**
     * When the cache entry was created
     *
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
    /**
     * When the cache entry was last accessed
     *
     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
     */
    lastAccessedAt?: Timestamp;
    /**
     * When the cache entry is set to expire
     *
     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
     */
    expiresAt?: Timestamp;
}
declare class CacheEntry$Type extends MessageType<CacheEntry> {
    constructor();
    create(value?: PartialMessage<CacheEntry>): CacheEntry;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry;
    internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
 */
export declare const CacheEntry: CacheEntry$Type;
export {};
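One detail worth noting in the interface above: the int64 field size_bytes surfaces as a TypeScript string (protobuf-ts's lossless representation for 64-bit integers), so arithmetic needs an explicit conversion. A small sketch with invented values:

import { CacheEntry } from "./cacheentry";

// Sketch only; the key and size below are made-up example values.
const entry = CacheEntry.create({ key: "npm-deps-v1", sizeBytes: "2048" });
const sizeKiB = Number(BigInt(entry.sizeBytes) / 1024n); // 2; int64 arrives as a decimal string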
106
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheEntry = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const timestamp_1 = require("../../../google/protobuf/timestamp");
// @generated message type with reflection information, may provide speed optimized methods
class CacheEntry$Type extends runtime_5.MessageType {
    constructor() {
        super("github.actions.results.entities.v1.CacheEntry", [
            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
            { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
            { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
        ]);
    }
    create(value) {
        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            (0, runtime_3.reflectionMergePartial)(this, message, value);
        return message;
    }
    internalBinaryRead(reader, length, options, target) {
        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string key */ 1:
                    message.key = reader.string();
                    break;
                case /* string hash */ 2:
                    message.hash = reader.string();
                    break;
                case /* int64 size_bytes */ 3:
                    message.sizeBytes = reader.int64().toString();
                    break;
                case /* string scope */ 4:
                    message.scope = reader.string();
                    break;
                case /* string version */ 5:
                    message.version = reader.string();
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                case /* google.protobuf.Timestamp last_accessed_at */ 7:
                    message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
                    break;
                case /* google.protobuf.Timestamp expires_at */ 8:
                    message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message, writer, options) {
        /* string key = 1; */
        if (message.key !== "")
            writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
        /* string hash = 2; */
        if (message.hash !== "")
            writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
        /* int64 size_bytes = 3; */
        if (message.sizeBytes !== "0")
            writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
        /* string scope = 4; */
        if (message.scope !== "")
            writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
        /* string version = 5; */
        if (message.version !== "")
            writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Timestamp last_accessed_at = 7; */
        if (message.lastAccessedAt)
            timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Timestamp expires_at = 8; */
        if (message.expiresAt)
            timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
 */
exports.CacheEntry = new CacheEntry$Type();
//# sourceMappingURL=cacheentry.js.map
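The internalBinaryRead/internalBinaryWrite pair above is not called directly; the MessageType base class drives it through its toBinary and fromBinary helpers. A round-trip sketch with invented field values:

import { CacheEntry } from "./cacheentry";

// toBinary funnels into internalBinaryWrite; fromBinary funnels into internalBinaryRead.
const bytes = CacheEntry.toBinary(CacheEntry.create({ key: "k", version: "v" }));
const parsed = CacheEntry.fromBinary(bytes);
console.assert(parsed.key === "k" && parsed.version === "v");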
1
node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"cacheentry.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cacheentry.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,kEAA+D;AAsD/D,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACnE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACtE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;SACrE,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;QAC9E,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,gBAAgB,CAAC,CAAC;oBACnB,OAAO,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC9B,MAAM;gBACV,KAAK,iBAAiB,CAAC,CAAC;oBACpB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC/B,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC9C,MAAM;gBACV,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,oBAAoB,CAAC,CAAC;oBACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAClC,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV,KAAK,gDAAgD,CAAC,CAAC;oBACnD,OAAO,CAAC,cAAc,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;oBAChH,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,qBAAqB;QACrB,IAAI,OAAO,CAAC,GAAG,KAAK,EAAE;YAClB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChE,sBAAsB;QACtB,IAAI,OAAO,CAAC,IAAI,KAAK,EAAE;YACnB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC
,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACjE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,SAAS,KAAK,GAAG;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QAC5D,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,yBAAyB;QACzB,IAAI,OAAO,CAAC,OAAO,KAAK,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QACpE,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,qDAAqD;QACrD,IAAI,OAAO,CAAC,cAAc;YACtB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QAC1H,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}
35
node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
 * @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
 */
export interface CacheMetadata {
    /**
     * Backend repository id
     *
     * @generated from protobuf field: int64 repository_id = 1;
     */
    repositoryId: string;
    /**
     * Scopes for the cache entry
     *
     * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
     */
    scope: CacheScope[];
}
declare class CacheMetadata$Type extends MessageType<CacheMetadata> {
    constructor();
    create(value?: PartialMessage<CacheMetadata>): CacheMetadata;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata;
    internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
 */
export declare const CacheMetadata: CacheMetadata$Type;
export {};
64
node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CacheMetadata = void 0;
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const cachescope_1 = require("./cachescope");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CacheMetadata$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.entities.v1.CacheMetadata", [
|
||||
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cachescope_1.CacheScope }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { repositoryId: "0", scope: [] };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 repository_id */ 1:
|
||||
message.repositoryId = reader.int64().toString();
|
||||
break;
|
||||
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
|
||||
message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int64 repository_id = 1; */
|
||||
if (message.repositoryId !== "0")
|
||||
writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId);
|
||||
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
|
||||
for (let i = 0; i < message.scope.length; i++)
|
||||
cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
|
||||
*/
|
||||
exports.CacheMetadata = new CacheMetadata$Type();
|
||||
//# sourceMappingURL=cachemetadata.js.map
1
node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"cachemetadata.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachemetadata.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,6CAA0C;AAkB1C,2FAA2F;AAC3F,MAAM,kBAAmB,SAAQ,qBAA0B;IACvD;QACI,KAAK,CAAC,kDAAkD,EAAE;YACtD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YAC3E,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC,CAAC,qBAAqB,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,uBAAU,EAAE;SAClG,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAqC;QACxC,MAAM,OAAO,GAAG,EAAE,YAAY,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;QACjD,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAgB,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAChE,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAsB;QACxG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,yBAAyB,CAAC,CAAC;oBAC5B,OAAO,CAAC,YAAY,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBACjD,MAAM;gBACV,KAAK,kEAAkE,CAAC,CAAC;oBACrE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAU,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC;oBACpF,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAsB,EAAE,MAAqB,EAAE,OAA2B;QAC1F,8BAA8B;QAC9B,IAAI,OAAO,CAAC,YAAY,KAAK,GAAG;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC/D,uEAAuE;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;YACzC,uBAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,aAAa,GAAG,IAAI,kBAAkB,EAAE,CAAC"}
34
node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
 * @generated from protobuf message github.actions.results.entities.v1.CacheScope
 */
export interface CacheScope {
    /**
     * Determines the scope of the cache entry
     *
     * @generated from protobuf field: string scope = 1;
     */
    scope: string;
    /**
     * None: 0 | Read: 1 | Write: 2 | All: (1|2)
     *
     * @generated from protobuf field: int64 permission = 2;
     */
    permission: string;
}
declare class CacheScope$Type extends MessageType<CacheScope> {
    constructor();
    create(value?: PartialMessage<CacheScope>): CacheScope;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope;
    internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
 */
export declare const CacheScope: CacheScope$Type;
export {};
63
node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CacheScope = void 0;
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CacheScope$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.entities.v1.CacheScope", [
|
||||
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { scope: "", permission: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string scope */ 1:
|
||||
message.scope = reader.string();
|
||||
break;
|
||||
case /* int64 permission */ 2:
|
||||
message.permission = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string scope = 1; */
|
||||
if (message.scope !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope);
|
||||
/* int64 permission = 2; */
|
||||
if (message.permission !== "0")
|
||||
writer.tag(2, runtime_1.WireType.Varint).int64(message.permission);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
|
||||
*/
|
||||
exports.CacheScope = new CacheScope$Type();
|
||||
//# sourceMappingURL=cachescope.js.map
1
node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"cachescope.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachescope.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AAkBnD,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;SAC3E,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QAC/C,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC/C,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,UAAU,KAAK,GAAG;YAC1B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QAC7D,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}
4
node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts
generated
vendored
@@ -1,8 +1,6 @@
import { CompressionMethod } from './constants';
import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts';
import { DownloadOptions, UploadOptions } from '../options';
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise<ArtifactCacheEntry | null>;
export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise<void>;
export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise<ITypedResponseWithError<ReserveCacheResponse>>;
export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise<void>;
export declare function saveCache(cacheId: number, archivePath: string, signedUploadURL?: string, options?: UploadOptions): Promise<void>;
66
node_modules/@actions/cache/lib/internal/cacheHttpClient.js
generated
vendored
@@ -32,20 +32,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const crypto = __importStar(require("crypto"));
const fs = __importStar(require("fs"));
const url_1 = require("url");
const utils = __importStar(require("./cacheUtils"));
const uploadUtils_1 = require("./uploadUtils");
const downloadUtils_1 = require("./downloadUtils");
const options_1 = require("../options");
const requestUtils_1 = require("./requestUtils");
const versionSalt = '1.0';
const config_1 = require("./config");
const user_agent_1 = require("./shared/user-agent");
function getCacheApiUrl(resource) {
    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
    const baseUrl = (0, config_1.getCacheServiceURL)();
    if (!baseUrl) {
        throw new Error('Cache Service Url not found, unable to restore cache.');
    }
@@ -67,29 +68,12 @@ function getRequestOptions() {
function createHttpClient() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
    return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
    return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
    // don't pass changes upstream
    const components = paths.slice();
    // Add compression method to cache version to restore
    // compressed cache as per compression method
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    // Only check for windows platforms if enableCrossOsArchive is false
    if (process.platform === 'win32' && !enableCrossOsArchive) {
        components.push('windows-only');
    }
    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);
    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const httpClient = createHttpClient();
        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
        const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
        const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
        // Cache not found
@@ -160,7 +144,7 @@ exports.downloadCache = downloadCache;
function reserveCache(key, paths, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const httpClient = createHttpClient();
        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
        const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
        const reserveCacheRequest = {
            key,
            version,
@@ -242,20 +226,30 @@ function commitCache(httpClient, cacheId, filesize) {
        }));
    });
}
function saveCache(cacheId, archivePath, options) {
function saveCache(cacheId, archivePath, signedUploadURL, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const httpClient = createHttpClient();
        core.debug('Upload cache');
        yield uploadFile(httpClient, cacheId, archivePath, options);
        // Commit Cache
        core.debug('Commiting cache');
        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
        core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
        if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
            throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
        const uploadOptions = (0, options_1.getUploadOptions)(options);
        if (uploadOptions.useAzureSdk) {
            // Use Azure storage SDK to upload caches directly to Azure
            if (!signedUploadURL) {
                throw new Error('Azure Storage SDK can only be used when a signed URL is provided.');
            }
            yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options);
        }
        else {
            const httpClient = createHttpClient();
            core.debug('Upload cache');
            yield uploadFile(httpClient, cacheId, archivePath, options);
            // Commit Cache
            core.debug('Commiting cache');
            const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
            core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
            const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
            if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
                throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
            }
            core.info('Cache saved successfully');
        }
        core.info('Cache saved successfully');
    });
}
exports.saveCache = saveCache;
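The rewritten `saveCache` branches on `useAzureSdk`: the Azure SDK path requires a signed upload URL, while the fallback keeps the legacy upload-then-commit flow. A hedged sketch of a caller under those assumptions (the cache id and signed URL are placeholders; the real URL comes from the v2 cache entry reservation):

```ts
import { saveCache } from "@actions/cache/lib/internal/cacheHttpClient";

async function demo(): Promise<void> {
    // v2 path: direct upload to Azure Blob Storage, signed URL mandatory
    await saveCache(1234, "/tmp/cache.tgz",
        "https://example.blob.core.windows.net/cache/entry?sig=...", // placeholder
        { useAzureSdk: true });

    // legacy path: HTTP chunked upload followed by commitCache
    await saveCache(1234, "/tmp/cache.tgz", undefined, { useAzureSdk: false });
}
```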
2
node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@actions/cache/lib/internal/cacheUtils.d.ts
generated
vendored
@@ -9,4 +9,5 @@ export declare function getCompressionMethod(): Promise<CompressionMethod>;
export declare function getCacheFileName(compressionMethod: CompressionMethod): string;
export declare function getGnuTarPathOnWindows(): Promise<string>;
export declare function assertDefined<T>(name: string, value?: T): T;
export declare function isGhes(): boolean;
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getRuntimeToken(): string;
34
node_modules/@actions/cache/lib/internal/cacheUtils.js
generated
vendored
@@ -39,7 +39,7 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
exports.getRuntimeToken = exports.getCacheVersion = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const glob = __importStar(require("@actions/glob"));
@@ -50,6 +50,7 @@ const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const util = __importStar(require("util"));
const constants_1 = require("./constants");
const versionSalt = '1.0';
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
    return __awaiter(this, void 0, void 0, function* () {
@@ -187,12 +188,29 @@ function assertDefined(name, value) {
    return value;
}
exports.assertDefined = assertDefined;
function isGhes() {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    const hostname = ghUrl.hostname.trimEnd().toUpperCase();
    const isGitHubHost = hostname === 'GITHUB.COM';
    const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
    return !isGitHubHost && !isGheHost;
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
    // don't pass changes upstream
    const components = paths.slice();
    // Add compression method to cache version to restore
    // compressed cache as per compression method
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    // Only check for windows platforms if enableCrossOsArchive is false
    if (process.platform === 'win32' && !enableCrossOsArchive) {
        components.push('windows-only');
    }
    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);
    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.isGhes = isGhes;
exports.getCacheVersion = getCacheVersion;
function getRuntimeToken() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!token) {
        throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
    }
    return token;
}
exports.getRuntimeToken = getRuntimeToken;
//# sourceMappingURL=cacheUtils.js.map
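`getCacheVersion` moved from `cacheHttpClient` into `cacheUtils` (with `versionSalt` following it), but the computation is unchanged: a sha256 over the `|`-joined path list, plus the compression method, an optional `windows-only` marker, and the salt. Re-stated standalone for illustration (not part of the diff):

```ts
import * as crypto from "crypto";

function cacheVersion(paths: string[], compressionMethod?: string, enableCrossOsArchive = false): string {
    const components = paths.slice(); // copy so the caller's array is untouched
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    if (process.platform === "win32" && !enableCrossOsArchive) {
        components.push("windows-only"); // keep win32 archives separate by default
    }
    components.push("1.0"); // versionSalt
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}

console.log(cacheVersion(["node_modules"], "zstd")); // deterministic hex digest
```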
2
node_modules/@actions/cache/lib/internal/cacheUtils.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,+CAAgC;AAChC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,2CAIoB;AAEpB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1D,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;IAC/E,CAAC;CAAA;AAND,wDAMC;AAED
,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GACb,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtE,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,CAAA;AACpC,CAAC;AAXD,wBAWC"}
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,+CAAgC;AAChC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,2CAIoB;AAEpB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1D,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;
IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC,EACrC,oBAAoB,GAAG,KAAK;IAE5B,8BAA8B;IAC9B,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,EAAE,CAAA;IAEhC,qDAAqD;IACrD,6CAA6C;IAC7C,IAAI,iBAAiB,EAAE;QACrB,UAAU,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;KACnC;IAED,oEAAoE;IACpE,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,oBAAoB,EAAE;QACzD,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;KAChC;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;AAC/E,CAAC;AAvBD,0CAuBC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;KACxE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC"}
3
node_modules/@actions/cache/lib/internal/config.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
export declare function isGhes(): boolean;
export declare function getCacheServiceVersion(): string;
export declare function getCacheServiceURL(): string;
37
node_modules/@actions/cache/lib/internal/config.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getCacheServiceURL = exports.getCacheServiceVersion = exports.isGhes = void 0;
|
||||
function isGhes() {
|
||||
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
|
||||
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
|
||||
const isGitHubHost = hostname === 'GITHUB.COM';
|
||||
const isGheHost = hostname.endsWith('.GHE.COM');
|
||||
const isLocalHost = hostname.endsWith('.LOCALHOST');
|
||||
return !isGitHubHost && !isGheHost && !isLocalHost;
|
||||
}
|
||||
exports.isGhes = isGhes;
|
||||
function getCacheServiceVersion() {
|
||||
// Cache service v2 is not supported on GHES. We will default to
|
||||
// cache service v1 even if the feature flag was enabled by user.
|
||||
if (isGhes())
|
||||
return 'v1';
|
||||
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1';
|
||||
}
|
||||
exports.getCacheServiceVersion = getCacheServiceVersion;
|
||||
function getCacheServiceURL() {
|
||||
const version = getCacheServiceVersion();
|
||||
// Based on the version of the cache service, we will determine which
|
||||
// URL to use.
|
||||
switch (version) {
|
||||
case 'v1':
|
||||
return (process.env['ACTIONS_CACHE_URL'] ||
|
||||
process.env['ACTIONS_RESULTS_URL'] ||
|
||||
'');
|
||||
case 'v2':
|
||||
return process.env['ACTIONS_RESULTS_URL'] || '';
|
||||
default:
|
||||
throw new Error(`Unsupported cache service version: ${version}`);
|
||||
}
|
||||
}
|
||||
exports.getCacheServiceURL = getCacheServiceURL;
|
||||
//# sourceMappingURL=config.js.map
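The new `config` module centralizes service selection: v2 is gated on the `ACTIONS_CACHE_SERVICE_V2` flag and always disabled on GHES, and the URL lookup prefers `ACTIONS_RESULTS_URL` for v2. A small sketch of the resulting behavior (env values are illustrative, not part of the diff):

```ts
import { isGhes, getCacheServiceVersion, getCacheServiceURL } from "@actions/cache/lib/internal/config";

process.env["GITHUB_SERVER_URL"] = "https://github.com";
process.env["ACTIONS_CACHE_SERVICE_V2"] = "true";
process.env["ACTIONS_RESULTS_URL"] = "https://results-receiver.example.githubusercontent.com"; // illustrative

console.log(isGhes());                 // false: github.com is not a GHES host
console.log(getCacheServiceVersion()); // "v2" (forced back to "v1" on GHES regardless of the flag)
console.log(getCacheServiceURL());     // the ACTIONS_RESULTS_URL value when v2 is selected
```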
1
node_modules/@actions/cache/lib/internal/config.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/internal/config.ts"],"names":[],"mappings":";;;AAAA,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAC/C,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAA;IAEnD,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAA;AACpD,CAAC;AAXD,wBAWC;AAED,SAAgB,sBAAsB;IACpC,gEAAgE;IAChE,iEAAiE;IACjE,IAAI,MAAM,EAAE;QAAE,OAAO,IAAI,CAAA;IAEzB,OAAO,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;AAC9D,CAAC;AAND,wDAMC;AAED,SAAgB,kBAAkB;IAChC,MAAM,OAAO,GAAG,sBAAsB,EAAE,CAAA;IAExC,qEAAqE;IACrE,cAAc;IACd,QAAQ,OAAO,EAAE;QACf,KAAK,IAAI;YACP,OAAO,CACL,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;gBAChC,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC;gBAClC,EAAE,CACH,CAAA;QACH,KAAK,IAAI;YACP,OAAO,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAA;QACjD;YACE,MAAM,IAAI,KAAK,CAAC,sCAAsC,OAAO,EAAE,CAAC,CAAA;KACnE;AACH,CAAC;AAjBD,gDAiBC"}
1
node_modules/@actions/cache/lib/internal/constants.d.ts
generated
vendored
@@ -18,3 +18,4 @@ export declare const GnuTarPathOnWindows: string;
export declare const SystemTarPathOnWindows: string;
export declare const TarFilename = "cache.tar";
export declare const ManifestFilename = "manifest.txt";
export declare const CacheFileSizeLimit: number;
3
node_modules/@actions/cache/lib/internal/constants.js
generated
vendored
@@ -1,6 +1,6 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
|
||||
exports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
|
||||
var CacheFilename;
|
||||
(function (CacheFilename) {
|
||||
CacheFilename["Gzip"] = "cache.tgz";
|
||||
|
|
@ -33,4 +33,5 @@ exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\ta
|
|||
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
|
||||
exports.TarFilename = 'cache.tar';
|
||||
exports.ManifestFilename = 'manifest.txt';
|
||||
exports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); // 10GiB per repository
|
||||
//# sourceMappingURL=constants.js.map
|
||||
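For reference, the new `CacheFileSizeLimit` works out to 10 GiB in bytes, which lines up with the 10 GB per-repository cap described in the README:

```ts
const CacheFileSizeLimit = 10 * Math.pow(1024, 3);
console.log(CacheFileSizeLimit); // 10737418240 bytes = 10 GiB
```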
2
node_modules/@actions/cache/lib/internal/constants.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA"}
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA;AAEjC,QAAA,kBAAkB,GAAG,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA,CAAC,uBAAuB"}
6
node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp';
export declare function internalCacheTwirpClient(options?: {
    maxAttempts?: number;
    retryIntervalMs?: number;
    retryMultiplier?: number;
}): CacheServiceClientJSON;
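The exported factory wires the retrying transport into the Twirp-generated JSON client. Constructing one with custom retry settings (option names from the declaration above; the environment requirements come from the implementation below) might look like this sketch:

```ts
import { internalCacheTwirpClient } from "@actions/cache/lib/internal/shared/cacheTwirpClient";

// Requires ACTIONS_RUNTIME_TOKEN and a cache service URL in the environment.
const client = internalCacheTwirpClient({
    maxAttempts: 3,        // total request attempts before giving up
    retryIntervalMs: 1000, // base delay before the first retry
    retryMultiplier: 2     // exponential growth factor between retries
});
```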
160
node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js
generated
vendored
Normal file
@@ -0,0 +1,160 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.internalCacheTwirpClient = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const user_agent_1 = require("./user-agent");
|
||||
const errors_1 = require("./errors");
|
||||
const config_1 = require("../config");
|
||||
const cacheUtils_1 = require("../cacheUtils");
|
||||
const auth_1 = require("@actions/http-client/lib/auth");
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const cache_twirp_1 = require("../../generated/results/api/v1/cache.twirp");
|
||||
/**
|
||||
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
|
||||
*
|
||||
* It adds retry logic to the request method, which is not present in the generated client.
|
||||
*
|
||||
* This class is used to interact with cache service v2.
|
||||
*/
|
||||
class CacheServiceClient {
|
||||
constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
|
||||
this.maxAttempts = 5;
|
||||
this.baseRetryIntervalMilliseconds = 3000;
|
||||
this.retryMultiplier = 1.5;
|
||||
const token = (0, cacheUtils_1.getRuntimeToken)();
|
||||
this.baseUrl = (0, config_1.getCacheServiceURL)();
|
||||
if (maxAttempts) {
|
||||
this.maxAttempts = maxAttempts;
|
||||
}
|
||||
if (baseRetryIntervalMilliseconds) {
|
||||
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
|
||||
}
|
||||
if (retryMultiplier) {
|
||||
this.retryMultiplier = retryMultiplier;
|
||||
}
|
||||
this.httpClient = new http_client_1.HttpClient(userAgent, [
|
||||
new auth_1.BearerCredentialHandler(token)
|
||||
]);
|
||||
}
|
||||
// This function satisfies the Rpc interface. It is compatible with the JSON
|
||||
// JSON generated client.
|
||||
request(service, method, contentType, data) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
|
||||
(0, core_1.debug)(`[Request] ${method} ${url}`);
|
||||
const headers = {
|
||||
'Content-Type': contentType
|
||||
};
|
||||
try {
|
||||
const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
|
||||
return body;
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Failed to ${method}: ${error.message}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
retryableRequest(operation) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let attempt = 0;
|
||||
let errorMessage = '';
|
||||
let rawBody = '';
|
||||
while (attempt < this.maxAttempts) {
|
||||
let isRetryable = false;
|
||||
try {
|
||||
const response = yield operation();
|
||||
const statusCode = response.message.statusCode;
|
||||
rawBody = yield response.readBody();
|
||||
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
|
||||
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
|
||||
const body = JSON.parse(rawBody);
|
||||
(0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
|
||||
if (this.isSuccessStatusCode(statusCode)) {
|
||||
return { response, body };
|
||||
}
|
||||
isRetryable = this.isRetryableHttpStatusCode(statusCode);
|
||||
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
|
||||
if (body.msg) {
|
||||
if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
|
||||
throw new errors_1.UsageError();
|
||||
}
|
||||
errorMessage = `${errorMessage}: ${body.msg}`;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
(0, core_1.debug)(`Raw Body: ${rawBody}`);
|
||||
}
|
||||
if (error instanceof errors_1.UsageError) {
|
||||
throw error;
|
||||
}
|
||||
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
|
||||
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
|
||||
}
|
||||
isRetryable = true;
|
||||
errorMessage = error.message;
|
||||
}
|
||||
if (!isRetryable) {
|
||||
throw new Error(`Received non-retryable error: ${errorMessage}`);
|
||||
}
|
||||
if (attempt + 1 === this.maxAttempts) {
|
||||
throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
|
||||
}
|
||||
const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
|
||||
(0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`);
|
||||
yield this.sleep(retryTimeMilliseconds);
|
||||
attempt++;
|
||||
}
|
||||
throw new Error(`Request failed`);
|
||||
});
|
||||
}
|
||||
isSuccessStatusCode(statusCode) {
|
||||
if (!statusCode)
|
||||
return false;
|
||||
return statusCode >= 200 && statusCode < 300;
|
||||
}
|
||||
isRetryableHttpStatusCode(statusCode) {
|
||||
if (!statusCode)
|
||||
return false;
|
||||
const retryableStatusCodes = [
|
||||
http_client_1.HttpCodes.BadGateway,
|
||||
http_client_1.HttpCodes.GatewayTimeout,
|
||||
http_client_1.HttpCodes.InternalServerError,
|
||||
http_client_1.HttpCodes.ServiceUnavailable,
|
||||
http_client_1.HttpCodes.TooManyRequests
|
||||
];
|
||||
return retryableStatusCodes.includes(statusCode);
|
||||
}
|
||||
sleep(milliseconds) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
||||
});
|
||||
}
|
||||
getExponentialRetryTimeMilliseconds(attempt) {
|
||||
if (attempt < 0) {
|
||||
throw new Error('attempt should be a positive integer');
|
||||
}
|
||||
if (attempt === 0) {
|
||||
return this.baseRetryIntervalMilliseconds;
|
||||
}
|
||||
const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt);
|
||||
const maxTime = minTime * this.retryMultiplier;
|
||||
// returns a random number between minTime and maxTime (exclusive)
|
||||
return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
|
||||
}
|
||||
}
|
||||
function internalCacheTwirpClient(options) {
|
||||
const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier);
|
||||
return new cache_twirp_1.CacheServiceClientJSON(client);
|
||||
}
|
||||
exports.internalCacheTwirpClient = internalCacheTwirpClient;
|
||||
//# sourceMappingURL=cacheTwirpClient.js.map
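The delay schedule in `getExponentialRetryTimeMilliseconds` is jittered exponential backoff: with the defaults (base 3000 ms, multiplier 1.5), attempt `n > 0` sleeps a random value in `[3000 * 1.5^n, 3000 * 1.5^(n+1))`. Re-stated standalone for illustration:

```ts
function retryDelayMs(attempt: number, baseMs = 3000, multiplier = 1.5): number {
    if (attempt < 0) throw new Error("attempt must be >= 0");
    if (attempt === 0) return baseMs; // first retry waits exactly the base interval
    const minTime = baseMs * Math.pow(multiplier, attempt);
    const maxTime = minTime * multiplier;
    return Math.trunc(Math.random() * (maxTime - minTime) + minTime); // jitter in [min, max)
}

for (let attempt = 0; attempt < 4; attempt++) {
    console.log(attempt, retryDelayMs(attempt)); // 0 -> 3000, 1 -> ~4500-6750, 2 -> ~6750-10125, ...
}
```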
1
node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"cacheTwirpClient.js","sourceRoot":"","sources":["../../../src/internal/shared/cacheTwirpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAyC;AACzC,6CAA+C;AAC/C,qCAAiD;AACjD,sCAA4C;AAC5C,8CAA6C;AAC7C,wDAAqE;AACrE,sDAA8E;AAC9E,4EAAiF;AAYjF;;;;;;GAMG;AACH,MAAM,kBAAkB;IAOtB,YACE,SAAiB,EACjB,WAAoB,EACpB,6BAAsC,EACtC,eAAwB;QARlB,gBAAW,GAAG,CAAC,CAAA;QACf,kCAA6B,GAAG,IAAI,CAAA;QACpC,oBAAe,GAAG,GAAG,CAAA;QAQ3B,MAAM,KAAK,GAAG,IAAA,4BAAe,GAAE,CAAA;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAA,2BAAkB,GAAE,CAAA;QACnC,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;SAC/B;QACD,IAAI,6BAA6B,EAAE;YACjC,IAAI,CAAC,6BAA6B,GAAG,6BAA6B,CAAA;SACnE;QACD,IAAI,eAAe,EAAE;YACnB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;SACvC;QAED,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,SAAS,EAAE;YAC1C,IAAI,8BAAuB,CAAC,KAAK,CAAC;SACnC,CAAC,CAAA;IACJ,CAAC;IAED,4EAA4E;IAC5E,yBAAyB;IACnB,OAAO,CACX,OAAe,EACf,MAAc,EACd,WAAwD,EACxD,IAAyB;;YAEzB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,OAAO,IAAI,MAAM,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAA;YACrE,IAAA,YAAK,EAAC,aAAa,MAAM,IAAI,GAAG,EAAE,CAAC,CAAA;YACnC,MAAM,OAAO,GAAG;gBACd,cAAc,EAAE,WAAW;aAC5B,CAAA;YACD,IAAI;gBACF,MAAM,EAAC,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAS,EAAE,gDACpD,OAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA,GAAA,CACzD,CAAA;gBAED,OAAO,IAAI,CAAA;aACZ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,aAAa,MAAM,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACzD;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,SAA4C;;YAE5C,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,YAAY,GAAG,EAAE,CAAA;YACrB,IAAI,OAAO,GAAG,EAAE,CAAA;YAChB,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,IAAI,WAAW,GAAG,KAAK,CAAA;gBAEvB,IAAI;oBACF,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;oBAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;oBAC9C,OAAO,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACnC,IAAA,YAAK,EAAC,gBAAgB,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;oBACpD,IAAA,YAAK,EAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBACtE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;oBAChC,IAAA,YAAK,EAAC,SAAS,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBAC/C,IAAI,IAAI,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;wBACxC,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAC,CAAA;qBACxB;oBACD,WAAW,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAA;oBACxD,YAAY,GAAG,oBAAoB,UAAU,KAAK,QAAQ,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;oBAClF,IAAI,IAAI,CAAC,GAAG,EAAE;wBACZ,IAAI,mBAAU,CAAC,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;4BAC5C,MAAM,IAAI,mBAAU,EAAE,CAAA;yBACvB;wBAED,YAAY,GAAG,GAAG,YAAY,KAAK,IAAI,CAAC,GAAG,EAAE,CAAA;qBAC9C;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,IAAI,KAAK,YAAY,WAAW,EAAE;wBAChC,IAAA,YAAK,EAAC,aAAa,OAAO,EAAE,CAAC,CAAA;qBAC9B;oBAED,IAAI,KAAK,YAAY,mBAAU,EAAE;wBAC/B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;wBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;qBACpC;oBAED,WAAW,GAAG,IAAI,CAAA;oBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;iBAC7B;gBAED,IAAI,CAAC,WAAW,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,YAAY,EAAE,CAAC,CAAA;iBACjE;gBAED,IAAI,OAAO,GAAG,CAAC,KAAK,IAAI,CAAC,WAAW,EAAE;oBACpC,MAAM,IAAI,KAAK,CACb,gCAAgC,IAAI,CAAC,WAAW,cAAc,YAAY,EAAE,CAC7E,CAAA;iBACF;gBAED,MAAM,qBAAqB,GACzB,IAAI,CAAC,mCAAmC,CAAC,OAAO,CAAC,CAAA;gBACnD,IAAA,WAAI,EACF,WAAW,OAAO,GAAG,CAAC,OACpB,IAAI,CAAC,WACP,uBAAuB,YAAY,yBAAyB,qBAAqB,QAAQ,CAC1F,CAAA;gBACD,MAAM,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAA;gBACvC,OAAO,EAAE,CAAA;aACV;YAED,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAA;QACnC,CAAC;KAAA;IAED,mBAAmB,CAAC,UAAmB;QACrC,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAC7B,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,
GAAG,CAAA;IAC9C,CAAC;IAED,yBAAyB,CAAC,UAAmB;QAC3C,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAE7B,MAAM,oBAAoB,GAAG;YAC3B,uBAAS,CAAC,UAAU;YACpB,uBAAS,CAAC,cAAc;YACxB,uBAAS,CAAC,mBAAmB;YAC7B,uBAAS,CAAC,kBAAkB;YAC5B,uBAAS,CAAC,eAAe;SAC1B,CAAA;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAClD,CAAC;IAEK,KAAK,CAAC,YAAoB;;YAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;QAClE,CAAC;KAAA;IAED,mCAAmC,CAAC,OAAe;QACjD,IAAI,OAAO,GAAG,CAAC,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SACxD;QAED,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,OAAO,IAAI,CAAC,6BAA6B,CAAA;SAC1C;QAED,MAAM,OAAO,GACX,IAAI,CAAC,6BAA6B,GAAG,SAAA,IAAI,CAAC,eAAe,EAAI,OAAO,CAAA,CAAA;QACtE,MAAM,OAAO,GAAG,OAAO,GAAG,IAAI,CAAC,eAAe,CAAA;QAE9C,kEAAkE;QAClE,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;IAClE,CAAC;CACF;AAED,SAAgB,wBAAwB,CAAC,OAIxC;IACC,MAAM,MAAM,GAAG,IAAI,kBAAkB,CACnC,IAAA,+BAAkB,GAAE,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,EACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,CACzB,CAAA;IACD,OAAO,IAAI,oCAAsB,CAAC,MAAM,CAAC,CAAA;AAC3C,CAAC;AAZD,4DAYC"}
22
node_modules/@actions/cache/lib/internal/shared/errors.d.ts
generated
vendored
Normal file
@@ -0,0 +1,22 @@
export declare class FilesNotFoundError extends Error {
    files: string[];
    constructor(files?: string[]);
}
export declare class InvalidResponseError extends Error {
    constructor(message: string);
}
export declare class CacheNotFoundError extends Error {
    constructor(message?: string);
}
export declare class GHESNotSupportedError extends Error {
    constructor(message?: string);
}
export declare class NetworkError extends Error {
    code: string;
    constructor(code: string);
    static isNetworkErrorCode: (code?: string) => boolean;
}
export declare class UsageError extends Error {
    constructor();
    static isUsageErrorMessage: (msg?: string) => boolean;
}
70
node_modules/@actions/cache/lib/internal/shared/errors.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.CacheNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
|
||||
class FilesNotFoundError extends Error {
|
||||
constructor(files = []) {
|
||||
let message = 'No files were found to upload';
|
||||
if (files.length > 0) {
|
||||
message += `: ${files.join(', ')}`;
|
||||
}
|
||||
super(message);
|
||||
this.files = files;
|
||||
this.name = 'FilesNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.FilesNotFoundError = FilesNotFoundError;
|
||||
class InvalidResponseError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'InvalidResponseError';
|
||||
}
|
||||
}
|
||||
exports.InvalidResponseError = InvalidResponseError;
|
||||
class CacheNotFoundError extends Error {
|
||||
constructor(message = 'Cache not found') {
|
||||
super(message);
|
||||
this.name = 'CacheNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.CacheNotFoundError = CacheNotFoundError;
|
||||
class GHESNotSupportedError extends Error {
|
||||
constructor(message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.') {
|
||||
super(message);
|
||||
this.name = 'GHESNotSupportedError';
|
||||
}
|
||||
}
|
||||
exports.GHESNotSupportedError = GHESNotSupportedError;
|
||||
class NetworkError extends Error {
|
||||
constructor(code) {
|
||||
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`;
|
||||
super(message);
|
||||
this.code = code;
|
||||
this.name = 'NetworkError';
|
||||
}
|
||||
}
|
||||
exports.NetworkError = NetworkError;
|
||||
NetworkError.isNetworkErrorCode = (code) => {
|
||||
if (!code)
|
||||
return false;
|
||||
return [
|
||||
'ECONNRESET',
|
||||
'ENOTFOUND',
|
||||
'ETIMEDOUT',
|
||||
'ECONNREFUSED',
|
||||
'EHOSTUNREACH'
|
||||
].includes(code);
|
||||
};
|
||||
class UsageError extends Error {
|
||||
constructor() {
|
||||
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
|
||||
super(message);
|
||||
this.name = 'UsageError';
|
||||
}
|
||||
}
|
||||
exports.UsageError = UsageError;
|
||||
UsageError.isUsageErrorMessage = (msg) => {
|
||||
if (!msg)
|
||||
return false;
|
||||
return msg.includes('insufficient usage');
|
||||
};
|
||||
//# sourceMappingURL=errors.js.map
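The typed errors above are what cache operations surface to callers. As a hedged, illustrative sketch of consumer-side handling (the import path mirrors the vendored layout in this diff; the wrapper function and its retry advice are assumptions, not part of the package):

// Illustrative only: branching on the error classes added above.
import {
    CacheNotFoundError,
    NetworkError,
    UsageError
} from '@actions/cache/lib/internal/shared/errors'

async function restoreOrMiss(restore: () => Promise<string | undefined>): Promise<string | undefined> {
    try {
        return await restore()
    } catch (error) {
        if (error instanceof CacheNotFoundError) {
            return undefined // treat as an ordinary cache miss
        }
        if (error instanceof NetworkError && NetworkError.isNetworkErrorCode(error.code)) {
            throw new Error(`transient network failure (${error.code}); a retry may succeed`)
        }
        if (error instanceof UsageError) {
            throw new Error('cache storage quota hit; usage is recalculated every 6-12 hours')
        }
        throw error // anything else is unexpected
    }
}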
1
node_modules/@actions/cache/lib/internal/shared/errors.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../../src/internal/shared/errors.ts"],"names":[],"mappings":";;;AAAA,MAAa,kBAAmB,SAAQ,KAAK;IAG3C,YAAY,QAAkB,EAAE;QAC9B,IAAI,OAAO,GAAG,+BAA+B,CAAA;QAC7C,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACpB,OAAO,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAA;SACnC;QAED,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AAbD,gDAaC;AAED,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAA;IACpC,CAAC;CACF;AALD,oDAKC;AAED,MAAa,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAO,GAAG,iBAAiB;QACrC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AALD,gDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YACE,OAAO,GAAG,mHAAmH;QAE7H,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AAPD,sDAOC;AAED,MAAa,YAAa,SAAQ,KAAK;IAGrC,YAAY,IAAY;QACtB,MAAM,OAAO,GAAG,2BAA2B,IAAI,kRAAkR,CAAA;QACjU,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAA;IAC5B,CAAC;;AARH,oCAoBC;AAVQ,+BAAkB,GAAG,CAAC,IAAa,EAAW,EAAE;IACrD,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACvB,OAAO;QACL,YAAY;QACZ,WAAW;QACX,WAAW;QACX,cAAc;QACd,cAAc;KACf,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAGH,MAAa,UAAW,SAAQ,KAAK;IACnC;QACE,MAAM,OAAO,GAAG,iSAAiS,CAAA;QACjT,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,YAAY,CAAA;IAC1B,CAAC;;AALH,gCAWC;AAJQ,8BAAmB,GAAG,CAAC,GAAY,EAAW,EAAE;IACrD,IAAI,CAAC,GAAG;QAAE,OAAO,KAAK,CAAA;IACtB,OAAO,GAAG,CAAC,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC3C,CAAC,CAAA"}
4
node_modules/@actions/cache/lib/internal/shared/user-agent.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/**
 * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
 */
export declare function getUserAgentString(): string;
13
node_modules/@actions/cache/lib/internal/shared/user-agent.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUserAgentString = void 0;
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
|
||||
const packageJson = require('../../../package.json');
|
||||
/**
|
||||
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
|
||||
*/
|
||||
function getUserAgentString() {
|
||||
return `@actions/cache-${packageJson.version}`;
|
||||
}
|
||||
exports.getUserAgentString = getUserAgentString;
|
||||
//# sourceMappingURL=user-agent.js.map
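For context, the returned string is the value a caller would pin on its HTTP client so requests are attributable to a package version; the `@actions/http-client` wiring below is an assumed consumer, not code from this diff:

import {HttpClient} from '@actions/http-client'
import {getUserAgentString} from '@actions/cache/lib/internal/shared/user-agent'

// Pin the package-versioned user agent on every request, e.g. "@actions/cache-4.0.0".
const http = new HttpClient(getUserAgentString())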
1
node_modules/@actions/cache/lib/internal/shared/user-agent.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"user-agent.js","sourceRoot":"","sources":["../../../src/internal/shared/user-agent.ts"],"names":[],"mappings":";;;AAAA,qGAAqG;AACrG,MAAM,WAAW,GAAG,OAAO,CAAC,uBAAuB,CAAC,CAAA;AAEpD;;GAEG;AACH,SAAgB,kBAAkB;IAChC,OAAO,kBAAkB,WAAW,CAAC,OAAO,EAAE,CAAA;AAChD,CAAC;AAFD,gDAEC"}
60
node_modules/@actions/cache/lib/internal/uploadUtils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import { BlobUploadCommonResponse } from '@azure/storage-blob';
import { TransferProgressEvent } from '@azure/ms-rest-js';
import { UploadOptions } from '../options';
/**
 * Class for tracking the upload state and displaying stats.
 */
export declare class UploadProgress {
    contentLength: number;
    sentBytes: number;
    startTime: number;
    displayedComplete: boolean;
    timeoutHandle?: ReturnType<typeof setTimeout>;
    constructor(contentLength: number);
    /**
     * Sets the number of bytes sent
     *
     * @param sentBytes the number of bytes sent
     */
    setSentBytes(sentBytes: number): void;
    /**
     * Returns the total number of bytes transferred.
     */
    getTransferredBytes(): number;
    /**
     * Returns true if the upload is complete.
     */
    isDone(): boolean;
    /**
     * Prints the current upload stats. Once the upload completes, this will print one
     * last line and then stop.
     */
    display(): void;
    /**
     * Returns a function used to handle TransferProgressEvents.
     */
    onProgress(): (progress: TransferProgressEvent) => void;
    /**
     * Starts the timer that displays the stats.
     *
     * @param delayInMs the delay between each write
     */
    startDisplayTimer(delayInMs?: number): void;
    /**
     * Stops the timer that displays the stats. As this typically indicates the upload
     * is complete, this will display one last line, unless the last line has already
     * been written.
     */
    stopDisplayTimer(): void;
}
/**
 * Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
 * This function will display progress information to the console. Concurrency of the
 * upload is determined by the calling functions.
 *
 * @param signedUploadURL
 * @param archivePath
 * @param options
 * @returns
 */
export declare function uploadCacheArchiveSDK(signedUploadURL: string, archivePath: string, options?: UploadOptions): Promise<BlobUploadCommonResponse>;
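A hedged sketch of how these declarations compose (the sizes and the halfway progress event are invented for illustration):

import {UploadProgress} from '@actions/cache/lib/internal/uploadUtils'

// Track a hypothetical 10 MiB transfer, printing stats once per second.
const progress = new UploadProgress(10 * 1024 * 1024)
progress.startDisplayTimer() // defaults to a 1000 ms interval
const report = progress.onProgress() // handler for TransferProgressEvents
report({loadedBytes: 5 * 1024 * 1024}) // e.g. halfway through
// ...once the transfer settles:
progress.stopDisplayTimer() // prints one final line unless already complete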
167
node_modules/@actions/cache/lib/internal/uploadUtils.js
generated
vendored
Normal file
@@ -0,0 +1,167 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const storage_blob_1 = require("@azure/storage-blob");
|
||||
const errors_1 = require("./shared/errors");
|
||||
/**
|
||||
* Class for tracking the upload state and displaying stats.
|
||||
*/
|
||||
class UploadProgress {
|
||||
constructor(contentLength) {
|
||||
this.contentLength = contentLength;
|
||||
this.sentBytes = 0;
|
||||
this.displayedComplete = false;
|
||||
this.startTime = Date.now();
|
||||
}
|
||||
/**
|
||||
* Sets the number of bytes sent
|
||||
*
|
||||
* @param sentBytes the number of bytes sent
|
||||
*/
|
||||
setSentBytes(sentBytes) {
|
||||
this.sentBytes = sentBytes;
|
||||
}
|
||||
/**
|
||||
* Returns the total number of bytes transferred.
|
||||
*/
|
||||
getTransferredBytes() {
|
||||
return this.sentBytes;
|
||||
}
|
||||
/**
|
||||
* Returns true if the upload is complete.
|
||||
*/
|
||||
isDone() {
|
||||
return this.getTransferredBytes() === this.contentLength;
|
||||
}
|
||||
/**
|
||||
* Prints the current upload stats. Once the upload completes, this will print one
|
||||
* last line and then stop.
|
||||
*/
|
||||
display() {
|
||||
if (this.displayedComplete) {
|
||||
return;
|
||||
}
|
||||
const transferredBytes = this.sentBytes;
|
||||
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
|
||||
const elapsedTime = Date.now() - this.startTime;
|
||||
const uploadSpeed = (transferredBytes /
|
||||
(1024 * 1024) /
|
||||
(elapsedTime / 1000)).toFixed(1);
|
||||
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
|
||||
if (this.isDone()) {
|
||||
this.displayedComplete = true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns a function used to handle TransferProgressEvents.
|
||||
*/
|
||||
onProgress() {
|
||||
return (progress) => {
|
||||
this.setSentBytes(progress.loadedBytes);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Starts the timer that displays the stats.
|
||||
*
|
||||
* @param delayInMs the delay between each write
|
||||
*/
|
||||
startDisplayTimer(delayInMs = 1000) {
|
||||
const displayCallback = () => {
|
||||
this.display();
|
||||
if (!this.isDone()) {
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
|
||||
}
|
||||
};
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
|
||||
}
|
||||
/**
|
||||
* Stops the timer that displays the stats. As this typically indicates the upload
|
||||
* is complete, this will display one last line, unless the last line has already
|
||||
* been written.
|
||||
*/
|
||||
stopDisplayTimer() {
|
||||
if (this.timeoutHandle) {
|
||||
clearTimeout(this.timeoutHandle);
|
||||
this.timeoutHandle = undefined;
|
||||
}
|
||||
this.display();
|
||||
}
|
||||
}
|
||||
exports.UploadProgress = UploadProgress;
|
||||
/**
|
||||
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
|
||||
* This function will display progress information to the console. Concurrency of the
|
||||
* upload is determined by the calling functions.
|
||||
*
|
||||
* @param signedUploadURL
|
||||
* @param archivePath
|
||||
* @param options
|
||||
* @returns
|
||||
*/
|
||||
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
|
||||
const blockBlobClient = blobClient.getBlockBlobClient();
|
||||
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
|
||||
// Specify data transfer options
|
||||
const uploadOptions = {
|
||||
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
|
||||
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
|
||||
maxSingleShotSize: 128 * 1024 * 1024,
|
||||
onProgress: uploadProgress.onProgress()
|
||||
};
|
||||
try {
|
||||
uploadProgress.startDisplayTimer();
|
||||
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
|
||||
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
|
||||
// TODO: better management of non-retryable errors
|
||||
if (response._response.status >= 400) {
|
||||
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
|
||||
}
|
||||
return response;
|
||||
}
|
||||
catch (error) {
|
||||
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
|
||||
throw error;
|
||||
}
|
||||
finally {
|
||||
uploadProgress.stopDisplayTimer();
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
|
||||
//# sourceMappingURL=uploadUtils.js.map
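End to end, a caller hands the function a SAS-signed blob URL and a local archive path; in the sketch below the URL, path, and sizes are placeholders, and the async wrapper is assumed:

import {uploadCacheArchiveSDK} from '@actions/cache/lib/internal/uploadUtils'

async function uploadArchive(): Promise<void> {
    // Placeholder SAS URL; in practice it is issued by the cache service.
    const signedUploadURL = 'https://example.blob.core.windows.net/cache/archive.tzst?sv=...'
    const response = await uploadCacheArchiveSDK(signedUploadURL, '/tmp/archive.tzst', {
        uploadConcurrency: 4, // parallel block uploads
        uploadChunkSize: 32 * 1024 * 1024, // 32 MiB blocks
        archiveSizeBytes: 10 * 1024 * 1024 // drives the progress percentage
    })
    console.log(`upload completed with status ${response._response.status}`)
}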
1
node_modules/@actions/cache/lib/internal/uploadUtils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"uploadUtils.js","sourceRoot":"","sources":["../../src/internal/uploadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAK4B;AAE5B,4CAAoD;AAGpD;;GAEG;AACH,MAAa,cAAc;IAOzB,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAA;QAClB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;OAIG;IACH,YAAY,CAAC,SAAiB;QAC5B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,SAAS,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAA;QACvC,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,WAAW,GAAG,CAClB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,QAAQ,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,WAAW,UAAU,CAC7F,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QACzC,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAzGD,wCAyGC;AAED;;;;;;;;;GASG;AACH,SAAsB,qBAAqB,CACzC,eAAuB,EACvB,WAAmB,EACnB,OAAuB;;;QAEvB,MAAM,UAAU,GAAe,IAAI,yBAAU,CAAC,eAAe,CAAC,CAAA;QAC9D,MAAM,eAAe,GAAoB,UAAU,CAAC,kBAAkB,EAAE,CAAA;QACxE,MAAM,cAAc,GAAG,IAAI,cAAc,CAAC,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,mCAAI,CAAC,CAAC,CAAA;QAEzE,gCAAgC;QAChC,MAAM,aAAa,GAAmC;YACpD,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe;YACnC,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB;YACvC,iBAAiB,EAAE,GAAG,GAAG,IAAI,GAAG,IAAI;YACpC,UAAU,EAAE,cAAc,CAAC,UAAU,EAAE;SACxC,CAAA;QAED,IAAI;YACF,cAAc,CAAC,iBAAiB,EAAE,CAAA;YAElC,IAAI,CAAC,KAAK,CACR,eAAe,UAAU,CAAC,IAAI,IAAI,UAAU,CAAC,WAAW,IAAI,UAAU,CAAC,aAAa,EAAE,CACvF,CAAA;YAED,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,UAAU,CAC/C,WAAW,EACX,aAAa,CACd,CAAA;YAED,kDAAkD;YAClD,IAAI,QAAQ,CAAC,SAAS,CAAC,MAAM,IAAI,GAAG,EAAE;gBACpC,MAAM,IAAI,6BAAoB,CAC5B,yDAAyD,QAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,CACrF,CAAA;aACF;YAED,OAAO,QAAQ,CAAA;SAChB;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,OAAO,CACV,kEAAkE,KAAK,CAAC,OAAO,EAAE,CAClF,CAAA;YACD,MAAM,KAAK,CAAA;SACZ;gBAAS;YACR,cAAc,CAAC,gBAAgB,EAAE,CAAA;SAClC;;CACF;AA7CD,sDA6CC"}
12
node_modules/@actions/cache/lib/options.d.ts
generated
vendored
@@ -2,6 +2,14 @@
 * Options to control cache upload
 */
export interface UploadOptions {
    /**
     * Indicates whether to use the Azure Blob SDK to download caches
     * that are stored on Azure Blob Storage to improve reliability and
     * performance
     *
     * @default false
     */
    useAzureSdk?: boolean;
    /**
     * Number of parallel cache upload
     *
@@ -14,6 +22,10 @@ export interface UploadOptions {
     * @default 32MB
     */
    uploadChunkSize?: number;
    /**
     * Archive size in bytes
     */
    archiveSizeBytes?: number;
}
/**
 * Options to control cache download
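Since the new `useAzureSdk` and `archiveSizeBytes` fields flow through the public save path, here is a hedged caller-side sketch (paths and key are placeholders; `archiveSizeBytes` is normally filled in internally rather than by callers):

import * as cache from '@actions/cache'

async function saveDeps(): Promise<number> {
    // Placeholder paths/key; the options object is the UploadOptions shape above.
    return cache.saveCache(['node_modules'], 'npm-deps-v1', {
        useAzureSdk: true, // upload through the Azure Blob SDK path
        uploadConcurrency: 8,
        uploadChunkSize: 64 * 1024 * 1024
    })
}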
17
node_modules/@actions/cache/lib/options.js
generated
vendored
@@ -31,11 +31,16 @@ const core = __importStar(require("@actions/core"));
 * @param copy the original upload options
 */
function getUploadOptions(copy) {
    // Defaults if not overriden
    const result = {
        useAzureSdk: false,
        uploadConcurrency: 4,
        uploadChunkSize: 32 * 1024 * 1024
    };
    if (copy) {
        if (typeof copy.useAzureSdk === 'boolean') {
            result.useAzureSdk = copy.useAzureSdk;
        }
        if (typeof copy.uploadConcurrency === 'number') {
            result.uploadConcurrency = copy.uploadConcurrency;
        }
@@ -43,6 +48,18 @@ function getUploadOptions(copy) {
            result.uploadChunkSize = copy.uploadChunkSize;
        }
    }
    /**
     * Add env var overrides
     */
    // Cap the uploadConcurrency at 32
    result.uploadConcurrency = !isNaN(Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
        ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
        : result.uploadConcurrency;
    // Cap the uploadChunkSize at 128MiB
    result.uploadChunkSize = !isNaN(Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']))
        ? Math.min(128 * 1024 * 1024, Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024)
        : result.uploadChunkSize;
    core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
    core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
    core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
    return result;
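The clamping added above can be exercised directly; a small sketch of the expected values (the import path mirrors the vendored layout, and the environment variables are the ones read in the code):

import {getUploadOptions} from '@actions/cache/lib/options'

// Values above the caps are clamped: 64 -> 32 workers, 256 MiB -> 128 MiB chunks.
process.env['CACHE_UPLOAD_CONCURRENCY'] = '64'
process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '256' // interpreted as MiB

const opts = getUploadOptions({uploadConcurrency: 4})
console.log(opts.uploadConcurrency) // 32
console.log(opts.uploadChunkSize) // 134217728 (128 MiB)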
2
node_modules/@actions/cache/lib/options.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAwErC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAoFrC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,4BAA4B;IAC5B,MAAM,MAAM,GAAkB;QAC5B,WAAW,EAAE,KAAK;QAClB,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED;;OAEG;IACH,kCAAkC;IAClC,MAAM,CAAC,iBAAiB,GAAG,CAAC,KAAK,CAC/B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAChD;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAA;IAC5B,oCAAoC;IACpC,MAAM,CAAC,eAAe,GAAG,CAAC,KAAK,CAC7B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,CAC/C;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,GAAG,GAAG,IAAI,GAAG,IAAI,EACjB,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAC7D;QACH,CAAC,CAAC,MAAM,CAAC,eAAe,CAAA;IAE1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,4CA8CC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}
8
node_modules/@actions/cache/package.json
generated
vendored
@@ -1,6 +1,6 @@
{
  "name": "@actions/cache",
  "version": "3.3.0",
  "version": "4.0.0",
  "preview": true,
  "description": "Actions cache lib",
  "keywords": [
@@ -45,10 +45,12 @@
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.13.0",
|
||||
"semver": "^6.3.1"
|
||||
"@protobuf-ts/plugin": "^2.9.4",
|
||||
"semver": "^6.3.1",
|
||||
"twirp-ts": "^2.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/semver": "^6.0.0",
|
||||
"typescript": "^5.2.2"
|
||||
}
|
||||
}
|
||||
}