Bump artifact dependencies if CODEQL_ACTION_ARTIFACT_V2_UPGRADE enabled (#2482)

Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
Co-authored-by: Henry Mercer <henrymercer@github.com>

parent cf5b0a9041
commit a196a714b8

5388 changed files with 2176737 additions and 71701 deletions
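The sections below vendor @actions/artifact v2 into node_modules. As a hedged sketch (not code from this commit) of what the flag in the commit title gates, assuming the well-known uploadArtifact(name, files, rootDirectory) client API:

// Sketch only: env-var-gated selection of the new client. The variable name
// comes from the commit title; everything else here is illustrative.
import artifact from "@actions/artifact"; // v2 default client instance (see artifact.d.ts below)

export async function uploadDebugArtifact(
    name: string,
    files: string[],
    rootDirectory: string
): Promise<void> {
    if (process.env.CODEQL_ACTION_ARTIFACT_V2_UPGRADE === "true") {
        // v2 path: ready-made client exported by the new entry point.
        await artifact.uploadArtifact(name, files, rootDirectory);
    } else {
        // v1 path (pre-upgrade): clients came from the create() factory that
        // this commit deletes from the vendored copy, e.g.
        //   const client = create(); await client.uploadArtifact(name, files, rootDirectory);
    }
}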

node_modules/@actions/artifact/lib/artifact-client.d.ts (generated, vendored; 10 lines deleted)
@@ -1,10 +0,0 @@
-import { UploadOptions } from './internal/upload-options';
-import { UploadResponse } from './internal/upload-response';
-import { DownloadOptions } from './internal/download-options';
-import { DownloadResponse } from './internal/download-response';
-import { ArtifactClient } from './internal/artifact-client';
-export { ArtifactClient, UploadResponse, UploadOptions, DownloadResponse, DownloadOptions };
-/**
- * Constructs an ArtifactClient
- */
-export declare function create(): ArtifactClient;

node_modules/@actions/artifact/lib/artifact-client.js (generated, vendored; 12 lines deleted)
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.create = void 0;
-const artifact_client_1 = require("./internal/artifact-client");
-/**
- * Constructs an ArtifactClient
- */
-function create() {
-    return artifact_client_1.DefaultArtifactClient.create();
-}
-exports.create = create;
-//# sourceMappingURL=artifact-client.js.map

node_modules/@actions/artifact/lib/artifact-client.js.map (generated, vendored; 1 line deleted)
@@ -1 +0,0 @@
-{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../src/artifact-client.ts"],"names":[],"mappings":";;;AAIA,gEAAgF;AAUhF;;GAEG;AACH,SAAgB,MAAM;IACpB,OAAO,uCAAqB,CAAC,MAAM,EAAE,CAAA;AACvC,CAAC;AAFD,wBAEC"}

node_modules/@actions/artifact/lib/artifact.d.ts (generated, vendored; new file, 6 lines)
@@ -0,0 +1,6 @@
+import { ArtifactClient } from './internal/client';
+export * from './internal/shared/interfaces';
+export * from './internal/shared/errors';
+export * from './internal/client';
+declare const client: ArtifactClient;
+export default client;

node_modules/@actions/artifact/lib/artifact.js (generated, vendored; new file, 23 lines)
@@ -0,0 +1,23 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const client_1 = require("./internal/client");
+__exportStar(require("./internal/shared/interfaces"), exports);
+__exportStar(require("./internal/shared/errors"), exports);
+__exportStar(require("./internal/client"), exports);
+const client = new client_1.DefaultArtifactClient();
+exports.default = client;
+//# sourceMappingURL=artifact.js.map
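
The compiled entry point above pairs with the artifact.d.ts declaration earlier. A hedged reconstruction of the TypeScript it was emitted from (inferred from the declaration file, the compiled output, and the source map's src/artifact.ts reference; the real source file is not part of this diff):

// Hedged reconstruction of src/artifact.ts; inferred, not shown in the diff.
import { ArtifactClient, DefaultArtifactClient } from './internal/client'

export * from './internal/shared/interfaces'
export * from './internal/shared/errors'
export * from './internal/client'

// Module-level singleton: importers get a shared, ready-to-use client.
const client: ArtifactClient = new DefaultArtifactClient()
export default client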

node_modules/@actions/artifact/lib/artifact.js.map (generated, vendored; new file, 1 line)
@@ -0,0 +1 @@
+{"version":3,"file":"artifact.js","sourceRoot":"","sources":["../src/artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAAuE;AAEvE,+DAA4C;AAC5C,2DAAwC;AACxC,oDAAiC;AAEjC,MAAM,MAAM,GAAmB,IAAI,8BAAqB,EAAE,CAAA;AAC1D,kBAAe,MAAM,CAAA"}

node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.d.ts (generated, vendored; new file, 145 lines)
@@ -0,0 +1,145 @@
+import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
+import type { IBinaryWriter } from "@protobuf-ts/runtime";
+import type { BinaryReadOptions } from "@protobuf-ts/runtime";
+import type { IBinaryReader } from "@protobuf-ts/runtime";
+import type { PartialMessage } from "@protobuf-ts/runtime";
+import type { JsonValue } from "@protobuf-ts/runtime";
+import type { JsonReadOptions } from "@protobuf-ts/runtime";
+import type { JsonWriteOptions } from "@protobuf-ts/runtime";
+import { MessageType } from "@protobuf-ts/runtime";
+/**
+ * A Timestamp represents a point in time independent of any time zone
+ * or calendar, represented as seconds and fractions of seconds at
+ * nanosecond resolution in UTC Epoch time. It is encoded using the
+ * Proleptic Gregorian Calendar which extends the Gregorian calendar
+ * backwards to year one. It is encoded assuming all minutes are 60
+ * seconds long, i.e. leap seconds are "smeared" so that no leap second
+ * table is needed for interpretation. Range is from
+ * 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
+ * By restricting to that range, we ensure that we can convert to
+ * and from RFC 3339 date strings.
+ * See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
+ *
+ * # Examples
+ *
+ * Example 1: Compute Timestamp from POSIX `time()`.
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(time(NULL));
+ *     timestamp.set_nanos(0);
+ *
+ * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+ *
+ *     struct timeval tv;
+ *     gettimeofday(&tv, NULL);
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(tv.tv_sec);
+ *     timestamp.set_nanos(tv.tv_usec * 1000);
+ *
+ * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+ *
+ *     FILETIME ft;
+ *     GetSystemTimeAsFileTime(&ft);
+ *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+ *
+ *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+ *
+ * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+ *
+ *     long millis = System.currentTimeMillis();
+ *
+ *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ *         .setNanos((int) ((millis % 1000) * 1000000)).build();
+ *
+ *
+ * Example 5: Compute Timestamp from current time in Python.
+ *
+ *     timestamp = Timestamp()
+ *     timestamp.GetCurrentTime()
+ *
+ * # JSON Mapping
+ *
+ * In JSON format, the Timestamp type is encoded as a string in the
+ * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ * where {year} is always expressed using four digits while {month}, {day},
+ * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ * is required. A proto3 JSON serializer should always use UTC (as indicated by
+ * "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ * able to accept both UTC and other timezones (as indicated by an offset).
+ *
+ * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ * 01:30 UTC on January 15, 2017.
+ *
+ * In JavaScript, one can convert a Date object to this format using the
+ * standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
+ * method. In Python, a standard `datetime.datetime` object can be converted
+ * to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
+ * with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
+ * can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
+ * ) to obtain a formatter capable of generating timestamps in this format.
+ *
+ *
+ *
+ * @generated from protobuf message google.protobuf.Timestamp
+ */
+export interface Timestamp {
+    /**
+     * Represents seconds of UTC time since Unix epoch
+     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+     * 9999-12-31T23:59:59Z inclusive.
+     *
+     * @generated from protobuf field: int64 seconds = 1;
+     */
+    seconds: string;
+    /**
+     * Non-negative fractions of a second at nanosecond resolution. Negative
+     * second values with fractions must still have non-negative nanos values
+     * that count forward in time. Must be from 0 to 999,999,999
+     * inclusive.
+     *
+     * @generated from protobuf field: int32 nanos = 2;
+     */
+    nanos: number;
+}
+declare class Timestamp$Type extends MessageType<Timestamp> {
+    constructor();
+    /**
+     * Creates a new `Timestamp` for the current time.
+     */
+    now(): Timestamp;
+    /**
+     * Converts a `Timestamp` to a JavaScript Date.
+     */
+    toDate(message: Timestamp): Date;
+    /**
+     * Converts a JavaScript Date to a `Timestamp`.
+     */
+    fromDate(date: Date): Timestamp;
+    /**
+     * In JSON format, the `Timestamp` type is encoded as a string
+     * in the RFC 3339 format.
+     */
+    internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
+    /**
+     * In JSON format, the `Timestamp` type is encoded as a string
+     * in the RFC 3339 format.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
+    create(value?: PartialMessage<Timestamp>): Timestamp;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
+    internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Timestamp
+ */
+export declare const Timestamp: Timestamp$Type;
+export {};

node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js (generated, vendored; new file, 136 lines)
@@ -0,0 +1,136 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+const runtime_1 = require("@protobuf-ts/runtime");
+const runtime_2 = require("@protobuf-ts/runtime");
+const runtime_3 = require("@protobuf-ts/runtime");
+const runtime_4 = require("@protobuf-ts/runtime");
+const runtime_5 = require("@protobuf-ts/runtime");
+const runtime_6 = require("@protobuf-ts/runtime");
+const runtime_7 = require("@protobuf-ts/runtime");
+// @generated message type with reflection information, may provide speed optimized methods
+class Timestamp$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.Timestamp", [
+            { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
+        ]);
+    }
+    /**
+     * Creates a new `Timestamp` for the current time.
+     */
+    now() {
+        const msg = this.create();
+        const ms = Date.now();
+        msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
+        msg.nanos = (ms % 1000) * 1000000;
+        return msg;
+    }
+    /**
+     * Converts a `Timestamp` to a JavaScript Date.
+     */
+    toDate(message) {
+        return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
+    }
+    /**
+     * Converts a JavaScript Date to a `Timestamp`.
+     */
+    fromDate(date) {
+        const msg = this.create();
+        const ms = date.getTime();
+        msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
+        msg.nanos = (ms % 1000) * 1000000;
+        return msg;
+    }
+    /**
+     * In JSON format, the `Timestamp` type is encoded as a string
+     * in the RFC 3339 format.
+     */
+    internalJsonWrite(message, options) {
+        let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
+        if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
+            throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
+        if (message.nanos < 0)
+            throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
+        let z = "Z";
+        if (message.nanos > 0) {
+            let nanosStr = (message.nanos + 1000000000).toString().substring(1);
+            if (nanosStr.substring(3) === "000000")
+                z = "." + nanosStr.substring(0, 3) + "Z";
+            else if (nanosStr.substring(6) === "000")
+                z = "." + nanosStr.substring(0, 6) + "Z";
+            else
+                z = "." + nanosStr + "Z";
+        }
+        return new Date(ms).toISOString().replace(".000Z", z);
+    }
+    /**
+     * In JSON format, the `Timestamp` type is encoded as a string
+     * in the RFC 3339 format.
+     */
+    internalJsonRead(json, options, target) {
+        if (typeof json !== "string")
+            throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
+        let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
+        if (!matches)
+            throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
+        let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
+        if (Number.isNaN(ms))
+            throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
+        if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
+            throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
+        if (!target)
+            target = this.create();
+        target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
+        target.nanos = 0;
+        if (matches[7])
+            target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
+        return target;
+    }
+    create(value) {
+        const message = { seconds: "0", nanos: 0 };
+        globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_3.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* int64 seconds */ 1:
+                    message.seconds = reader.int64().toString();
+                    break;
+                case /* int32 nanos */ 2:
+                    message.nanos = reader.int32();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* int64 seconds = 1; */
+        if (message.seconds !== "0")
+            writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
+        /* int32 nanos = 2; */
+        if (message.nanos !== 0)
+            writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Timestamp
+ */
+exports.Timestamp = new Timestamp$Type();
+//# sourceMappingURL=timestamp.js.map
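
The compiled conversion helpers above are easier to read through a usage example. A hedged sketch (the deep import path into the vendored package is an assumption):

// Sketch: round-tripping through the generated Timestamp helpers shown above.
import { Timestamp } from "@actions/artifact/lib/generated";

const ts = Timestamp.fromDate(new Date("2017-01-15T01:30:15.010Z"));
console.log(ts.seconds);           // "1484443815" (int64 surfaces as a string: long_type_string)
console.log(ts.nanos);             // 10000000
console.log(Timestamp.toJson(ts)); // "2017-01-15T01:30:15.010Z" via internalJsonWrite
console.log(Timestamp.toDate(ts).toISOString()); // "2017-01-15T01:30:15.010Z"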

node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js.map (generated, vendored; new file, 1 line)
File diff suppressed because one or more lines are too long

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.d.ts (generated, vendored; new file, 307 lines)
@@ -0,0 +1,307 @@
+import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
+import type { IBinaryWriter } from "@protobuf-ts/runtime";
+import type { BinaryReadOptions } from "@protobuf-ts/runtime";
+import type { IBinaryReader } from "@protobuf-ts/runtime";
+import type { PartialMessage } from "@protobuf-ts/runtime";
+import type { JsonValue } from "@protobuf-ts/runtime";
+import type { JsonReadOptions } from "@protobuf-ts/runtime";
+import type { JsonWriteOptions } from "@protobuf-ts/runtime";
+import { MessageType } from "@protobuf-ts/runtime";
+/**
+ * Wrapper message for `double`.
+ *
+ * The JSON representation for `DoubleValue` is JSON number.
+ *
+ * @generated from protobuf message google.protobuf.DoubleValue
+ */
+export interface DoubleValue {
+    /**
+     * The double value.
+     *
+     * @generated from protobuf field: double value = 1;
+     */
+    value: number;
+}
+/**
+ * Wrapper message for `float`.
+ *
+ * The JSON representation for `FloatValue` is JSON number.
+ *
+ * @generated from protobuf message google.protobuf.FloatValue
+ */
+export interface FloatValue {
+    /**
+     * The float value.
+     *
+     * @generated from protobuf field: float value = 1;
+     */
+    value: number;
+}
+/**
+ * Wrapper message for `int64`.
+ *
+ * The JSON representation for `Int64Value` is JSON string.
+ *
+ * @generated from protobuf message google.protobuf.Int64Value
+ */
+export interface Int64Value {
+    /**
+     * The int64 value.
+     *
+     * @generated from protobuf field: int64 value = 1;
+     */
+    value: string;
+}
+/**
+ * Wrapper message for `uint64`.
+ *
+ * The JSON representation for `UInt64Value` is JSON string.
+ *
+ * @generated from protobuf message google.protobuf.UInt64Value
+ */
+export interface UInt64Value {
+    /**
+     * The uint64 value.
+     *
+     * @generated from protobuf field: uint64 value = 1;
+     */
+    value: string;
+}
+/**
+ * Wrapper message for `int32`.
+ *
+ * The JSON representation for `Int32Value` is JSON number.
+ *
+ * @generated from protobuf message google.protobuf.Int32Value
+ */
+export interface Int32Value {
+    /**
+     * The int32 value.
+     *
+     * @generated from protobuf field: int32 value = 1;
+     */
+    value: number;
+}
+/**
+ * Wrapper message for `uint32`.
+ *
+ * The JSON representation for `UInt32Value` is JSON number.
+ *
+ * @generated from protobuf message google.protobuf.UInt32Value
+ */
+export interface UInt32Value {
+    /**
+     * The uint32 value.
+     *
+     * @generated from protobuf field: uint32 value = 1;
+     */
+    value: number;
+}
+/**
+ * Wrapper message for `bool`.
+ *
+ * The JSON representation for `BoolValue` is JSON `true` and `false`.
+ *
+ * @generated from protobuf message google.protobuf.BoolValue
+ */
+export interface BoolValue {
+    /**
+     * The bool value.
+     *
+     * @generated from protobuf field: bool value = 1;
+     */
+    value: boolean;
+}
+/**
+ * Wrapper message for `string`.
+ *
+ * The JSON representation for `StringValue` is JSON string.
+ *
+ * @generated from protobuf message google.protobuf.StringValue
+ */
+export interface StringValue {
+    /**
+     * The string value.
+     *
+     * @generated from protobuf field: string value = 1;
+     */
+    value: string;
+}
+/**
+ * Wrapper message for `bytes`.
+ *
+ * The JSON representation for `BytesValue` is JSON string.
+ *
+ * @generated from protobuf message google.protobuf.BytesValue
+ */
+export interface BytesValue {
+    /**
+     * The bytes value.
+     *
+     * @generated from protobuf field: bytes value = 1;
+     */
+    value: Uint8Array;
+}
+declare class DoubleValue$Type extends MessageType<DoubleValue> {
+    constructor();
+    /**
+     * Encode `DoubleValue` to JSON number.
+     */
+    internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `DoubleValue` from JSON number.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
+    create(value?: PartialMessage<DoubleValue>): DoubleValue;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
+    internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.DoubleValue
+ */
+export declare const DoubleValue: DoubleValue$Type;
+declare class FloatValue$Type extends MessageType<FloatValue> {
+    constructor();
+    /**
+     * Encode `FloatValue` to JSON number.
+     */
+    internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `FloatValue` from JSON number.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
+    create(value?: PartialMessage<FloatValue>): FloatValue;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
+    internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.FloatValue
+ */
+export declare const FloatValue: FloatValue$Type;
+declare class Int64Value$Type extends MessageType<Int64Value> {
+    constructor();
+    /**
+     * Encode `Int64Value` to JSON string.
+     */
+    internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `Int64Value` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
+    create(value?: PartialMessage<Int64Value>): Int64Value;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
+    internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Int64Value
+ */
+export declare const Int64Value: Int64Value$Type;
+declare class UInt64Value$Type extends MessageType<UInt64Value> {
+    constructor();
+    /**
+     * Encode `UInt64Value` to JSON string.
+     */
+    internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `UInt64Value` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
+    create(value?: PartialMessage<UInt64Value>): UInt64Value;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
+    internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.UInt64Value
+ */
+export declare const UInt64Value: UInt64Value$Type;
+declare class Int32Value$Type extends MessageType<Int32Value> {
+    constructor();
+    /**
+     * Encode `Int32Value` to JSON string.
+     */
+    internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `Int32Value` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
+    create(value?: PartialMessage<Int32Value>): Int32Value;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
+    internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Int32Value
+ */
+export declare const Int32Value: Int32Value$Type;
+declare class UInt32Value$Type extends MessageType<UInt32Value> {
+    constructor();
+    /**
+     * Encode `UInt32Value` to JSON string.
+     */
+    internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `UInt32Value` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
+    create(value?: PartialMessage<UInt32Value>): UInt32Value;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
+    internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.UInt32Value
+ */
+export declare const UInt32Value: UInt32Value$Type;
+declare class BoolValue$Type extends MessageType<BoolValue> {
+    constructor();
+    /**
+     * Encode `BoolValue` to JSON bool.
+     */
+    internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `BoolValue` from JSON bool.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
+    create(value?: PartialMessage<BoolValue>): BoolValue;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
+    internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.BoolValue
+ */
+export declare const BoolValue: BoolValue$Type;
+declare class StringValue$Type extends MessageType<StringValue> {
+    constructor();
+    /**
+     * Encode `StringValue` to JSON string.
+     */
+    internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `StringValue` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
+    create(value?: PartialMessage<StringValue>): StringValue;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
+    internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.StringValue
+ */
+export declare const StringValue: StringValue$Type;
+declare class BytesValue$Type extends MessageType<BytesValue> {
+    constructor();
+    /**
+     * Encode `BytesValue` to JSON string.
+     */
+    internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
+    /**
+     * Decode `BytesValue` from JSON string.
+     */
+    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
+    create(value?: PartialMessage<BytesValue>): BytesValue;
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
+    internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.BytesValue
+ */
+export declare const BytesValue: BytesValue$Type;
+export {};
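
A hedged sketch of the JSON mapping these declarations imply; note that Int64Value.value is typed as string (the package is generated with long_type_string, per the banner in the next file), so 64-bit values beyond Number.MAX_SAFE_INTEGER survive intact. The deep import path is an assumption:

// Sketch: JSON round-trips for the wrapper types declared above.
import { Int64Value, BoolValue } from "@actions/artifact/lib/generated";

const big = Int64Value.create({ value: "9007199254740993" });
console.log(Int64Value.toJson(big)); // "9007199254740993" (JSON string per the proto3 mapping)

const flag = BoolValue.fromJson(true); // a bare JSON bool decodes into the wrapper
console.log(flag.value);               // true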

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js (generated, vendored; new file, 609 lines)
@@ -0,0 +1,609 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
+// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
+// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
+// tslint:disable
+//
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// Wrappers for primitive (non-message) types. These types are useful
+// for embedding primitives in the `google.protobuf.Any` type and for places
+// where we need to distinguish between the absence of a primitive
+// typed field and its default value.
+//
+const runtime_1 = require("@protobuf-ts/runtime");
+const runtime_2 = require("@protobuf-ts/runtime");
+const runtime_3 = require("@protobuf-ts/runtime");
+const runtime_4 = require("@protobuf-ts/runtime");
+const runtime_5 = require("@protobuf-ts/runtime");
+const runtime_6 = require("@protobuf-ts/runtime");
+const runtime_7 = require("@protobuf-ts/runtime");
+// @generated message type with reflection information, may provide speed optimized methods
+class DoubleValue$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.DoubleValue", [
+            { no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
+        ]);
+    }
+    /**
+     * Encode `DoubleValue` to JSON number.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(2, message.value, "value", false, true);
+    }
+    /**
+     * Decode `DoubleValue` from JSON number.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: 0 };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* double value */ 1:
+                    message.value = reader.double();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* double value = 1; */
+        if (message.value !== 0)
+            writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.DoubleValue
+ */
+exports.DoubleValue = new DoubleValue$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class FloatValue$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.FloatValue", [
+            { no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
+        ]);
+    }
+    /**
+     * Encode `FloatValue` to JSON number.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(1, message.value, "value", false, true);
+    }
+    /**
+     * Decode `FloatValue` from JSON number.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: 0 };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* float value */ 1:
+                    message.value = reader.float();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* float value = 1; */
+        if (message.value !== 0)
+            writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.FloatValue
+ */
+exports.FloatValue = new FloatValue$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class Int64Value$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.Int64Value", [
+            { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+        ]);
+    }
+    /**
+     * Encode `Int64Value` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
+    }
+    /**
+     * Decode `Int64Value` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: "0" };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* int64 value */ 1:
+                    message.value = reader.int64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* int64 value = 1; */
+        if (message.value !== "0")
+            writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Int64Value
+ */
+exports.Int64Value = new Int64Value$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class UInt64Value$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.UInt64Value", [
+            { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
+        ]);
+    }
+    /**
+     * Encode `UInt64Value` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
+    }
+    /**
+     * Decode `UInt64Value` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: "0" };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* uint64 value */ 1:
+                    message.value = reader.uint64().toString();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* uint64 value = 1; */
+        if (message.value !== "0")
+            writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.UInt64Value
+ */
+exports.UInt64Value = new UInt64Value$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class Int32Value$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.Int32Value", [
+            { no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
+        ]);
+    }
+    /**
+     * Encode `Int32Value` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(5, message.value, "value", false, true);
+    }
+    /**
+     * Decode `Int32Value` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: 0 };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* int32 value */ 1:
+                    message.value = reader.int32();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* int32 value = 1; */
+        if (message.value !== 0)
+            writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.Int32Value
+ */
+exports.Int32Value = new Int32Value$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class UInt32Value$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.UInt32Value", [
+            { no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
+        ]);
+    }
+    /**
+     * Encode `UInt32Value` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(13, message.value, "value", false, true);
+    }
+    /**
+     * Decode `UInt32Value` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: 0 };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* uint32 value */ 1:
+                    message.value = reader.uint32();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* uint32 value = 1; */
+        if (message.value !== 0)
+            writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.UInt32Value
+ */
+exports.UInt32Value = new UInt32Value$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class BoolValue$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.BoolValue", [
+            { no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+        ]);
+    }
+    /**
+     * Encode `BoolValue` to JSON bool.
+     */
+    internalJsonWrite(message, options) {
+        return message.value;
+    }
+    /**
+     * Decode `BoolValue` from JSON bool.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: false };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bool value */ 1:
+                    message.value = reader.bool();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bool value = 1; */
+        if (message.value !== false)
+            writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.BoolValue
+ */
+exports.BoolValue = new BoolValue$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class StringValue$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.StringValue", [
+            { no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+        ]);
+    }
+    /**
+     * Encode `StringValue` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return message.value;
+    }
+    /**
+     * Decode `StringValue` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
+        return target;
+    }
+    create(value) {
+        const message = { value: "" };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string value */ 1:
+                    message.value = reader.string();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* string value = 1; */
+        if (message.value !== "")
+            writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.StringValue
+ */
+exports.StringValue = new StringValue$Type();
+// @generated message type with reflection information, may provide speed optimized methods
+class BytesValue$Type extends runtime_7.MessageType {
+    constructor() {
+        super("google.protobuf.BytesValue", [
+            { no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
+        ]);
+    }
+    /**
+     * Encode `BytesValue` to JSON string.
+     */
+    internalJsonWrite(message, options) {
+        return this.refJsonWriter.scalar(12, message.value, "value", false, true);
+    }
+    /**
+     * Decode `BytesValue` from JSON string.
+     */
+    internalJsonRead(json, options, target) {
+        if (!target)
+            target = this.create();
+        target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
+        return target;
+    }
    create(value) {
+        const message = { value: new Uint8Array(0) };
+        globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            (0, runtime_5.reflectionMergePartial)(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader, length, options, target) {
+        let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* bytes value */ 1:
+                    message.value = reader.bytes();
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message, writer, options) {
+        /* bytes value = 1; */
+        if (message.value.length)
+            writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message google.protobuf.BytesValue
+ */
+exports.BytesValue = new BytesValue$Type();
+//# sourceMappingURL=wrappers.js.map
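
For completeness, a hedged sketch of the binary path through the generated methods above; toBinary/fromBinary are the public protobuf-ts MessageType entry points that delegate to internalBinaryWrite/internalBinaryRead, and the import path is an assumption:

// Sketch: binary round-trip for one of the wrapper types.
import { StringValue } from "@actions/artifact/lib/generated";

const msg = StringValue.create({ value: "hello" });
const bytes = StringValue.toBinary(msg);    // field 1, length-delimited (see internalBinaryWrite)
const back = StringValue.fromBinary(bytes); // parsed by internalBinaryRead
console.log(back.value); // "hello"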

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js.map (generated, vendored; new file, 1 line)
File diff suppressed because one or more lines are too long

node_modules/@actions/artifact/lib/generated/index.d.ts (generated, vendored; new file, 4 lines)
@@ -0,0 +1,4 @@
+export * from './google/protobuf/timestamp';
+export * from './google/protobuf/wrappers';
+export * from './results/api/v1/artifact';
+export * from './results/api/v1/artifact.twirp';
21
node_modules/@actions/artifact/lib/generated/index.js
generated
vendored
Normal file
21
node_modules/@actions/artifact/lib/generated/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
__exportStar(require("./google/protobuf/timestamp"), exports);
|
||||
__exportStar(require("./google/protobuf/wrappers"), exports);
|
||||
__exportStar(require("./results/api/v1/artifact"), exports);
|
||||
__exportStar(require("./results/api/v1/artifact.twirp"), exports);
|
||||
//# sourceMappingURL=index.js.map
|
||||
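The barrel above just re-exports the four generated modules, so a single import site covers the messages, the well-known wrappers, and the Twirp bindings. A sketch of a typical consumer import, assuming the vendored path:

import {
    CreateArtifactRequest,
    Timestamp,
    ArtifactServiceClientJSON,
} from "@actions/artifact/lib/generated";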
1
node_modules/@actions/artifact/lib/generated/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,kEAA+C"}
336
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.d.ts
generated
vendored
Normal file
@@ -0,0 +1,336 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Int64Value } from "../../../google/protobuf/wrappers";
import { StringValue } from "../../../google/protobuf/wrappers";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
 */
export interface CreateArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
    /**
     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 4;
     */
    expiresAt?: Timestamp;
    /**
     * @generated from protobuf field: int32 version = 5;
     */
    version: number;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateArtifactResponse
 */
export interface CreateArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: string signed_upload_url = 2;
     */
    signedUploadUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
 */
export interface FinalizeArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
    /**
     * @generated from protobuf field: int64 size = 4;
     */
    size: string;
    /**
     * @generated from protobuf field: google.protobuf.StringValue hash = 5;
     */
    hash?: StringValue;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
 */
export interface FinalizeArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: int64 artifact_id = 2;
     */
    artifactId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
 */
export interface ListArtifactsRequest {
    /**
     * The backend plan ID
     *
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * The backend job ID
     *
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * Name of the artifact to filter on
     *
     * @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
     */
    nameFilter?: StringValue;
    /**
     * Monolith Database ID of the artifact to filter on
     *
     * @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
     */
    idFilter?: Int64Value;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export interface ListArtifactsResponse {
    /**
     * @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
     */
    artifacts: ListArtifactsResponse_MonolithArtifact[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export interface ListArtifactsResponse_MonolithArtifact {
    /**
     * The backend plan ID
     *
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * The backend job ID
     *
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * Monolith database ID of the artifact
     *
     * @generated from protobuf field: int64 database_id = 3;
     */
    databaseId: string;
    /**
     * Name of the artifact
     *
     * @generated from protobuf field: string name = 4;
     */
    name: string;
    /**
     * Size of the artifact in bytes
     *
     * @generated from protobuf field: int64 size = 5;
     */
    size: string;
    /**
     * When the artifact was created in the monolith
     *
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export interface GetSignedArtifactURLRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export interface GetSignedArtifactURLResponse {
    /**
     * @generated from protobuf field: string signed_url = 1;
     */
    signedUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export interface DeleteArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export interface DeleteArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: int64 artifact_id = 2;
     */
    artifactId: string;
}
declare class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
    constructor();
    create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactRequest): CreateArtifactRequest;
    internalBinaryWrite(message: CreateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
 */
export declare const CreateArtifactRequest: CreateArtifactRequest$Type;
declare class CreateArtifactResponse$Type extends MessageType<CreateArtifactResponse> {
    constructor();
    create(value?: PartialMessage<CreateArtifactResponse>): CreateArtifactResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactResponse): CreateArtifactResponse;
    internalBinaryWrite(message: CreateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
 */
export declare const CreateArtifactResponse: CreateArtifactResponse$Type;
declare class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest> {
    constructor();
    create(value?: PartialMessage<FinalizeArtifactRequest>): FinalizeArtifactRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactRequest): FinalizeArtifactRequest;
    internalBinaryWrite(message: FinalizeArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
 */
export declare const FinalizeArtifactRequest: FinalizeArtifactRequest$Type;
declare class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse> {
    constructor();
    create(value?: PartialMessage<FinalizeArtifactResponse>): FinalizeArtifactResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactResponse): FinalizeArtifactResponse;
    internalBinaryWrite(message: FinalizeArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
 */
export declare const FinalizeArtifactResponse: FinalizeArtifactResponse$Type;
declare class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
    constructor();
    create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest;
    internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
 */
export declare const ListArtifactsRequest: ListArtifactsRequest$Type;
declare class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
    constructor();
    create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse;
    internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export declare const ListArtifactsResponse: ListArtifactsResponse$Type;
declare class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
    constructor();
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact;
    internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export declare const ListArtifactsResponse_MonolithArtifact: ListArtifactsResponse_MonolithArtifact$Type;
declare class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
    constructor();
    create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest;
    internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export declare const GetSignedArtifactURLRequest: GetSignedArtifactURLRequest$Type;
declare class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
    constructor();
    create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse;
    internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export declare const GetSignedArtifactURLResponse: GetSignedArtifactURLResponse$Type;
declare class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
    constructor();
    create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest;
    internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export declare const DeleteArtifactRequest: DeleteArtifactRequest$Type;
declare class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
    constructor();
    create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse;
    internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export declare const DeleteArtifactResponse: DeleteArtifactResponse$Type;
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
export declare const ArtifactService: ServiceType;
export {};
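Two conventions in these declarations are worth calling out: the generator ran with long_type_string, so every int64 field (size, artifact_id, database_id) surfaces as a decimal string rather than a number or bigint, and the optional well-known wrappers (StringValue, Int64Value) become optional nested messages. A hedged construction sketch under those conventions (all field values are made up):

import { FinalizeArtifactRequest, StringValue } from "@actions/artifact/lib/generated";

const req = FinalizeArtifactRequest.create({
    workflowRunBackendId: "run-backend-id",    // made-up ID
    workflowJobRunBackendId: "job-backend-id", // made-up ID
    name: "my-artifact",
    size: "1048576",                           // int64 carried as a decimal string
    hash: StringValue.create({ value: "sha256:deadbeef" }), // made-up digest
});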
704
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js
generated
vendored
Normal file
@@ -0,0 +1,704 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
|
||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const wrappers_1 = require("../../../google/protobuf/wrappers");
|
||||
const wrappers_2 = require("../../../google/protobuf/wrappers");
|
||||
const timestamp_1 = require("../../../google/protobuf/timestamp");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", version: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 4:
|
||||
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
case /* int32 version */ 5:
|
||||
message.version = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* google.protobuf.Timestamp expires_at = 4; */
|
||||
if (message.expiresAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* int32 version = 5; */
|
||||
if (message.version !== 0)
|
||||
writer.tag(5, runtime_1.WireType.Varint).int32(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
exports.CreateArtifactRequest = new CreateArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, signedUploadUrl: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
exports.CreateArtifactResponse = new CreateArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 5, name: "hash", kind: "message", T: () => wrappers_2.StringValue }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 4:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
case /* google.protobuf.StringValue hash */ 5:
|
||||
message.hash = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 4; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(4, runtime_1.WireType.Varint).int64(message.size);
|
||||
/* google.protobuf.StringValue hash = 5; */
|
||||
if (message.hash)
|
||||
wrappers_2.StringValue.internalBinaryWrite(message.hash, writer.tag(5, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
exports.FinalizeArtifactRequest = new FinalizeArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
exports.FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name_filter", kind: "message", T: () => wrappers_2.StringValue },
|
||||
{ no: 4, name: "id_filter", kind: "message", T: () => wrappers_1.Int64Value }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.StringValue name_filter */ 3:
|
||||
message.nameFilter = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
|
||||
break;
|
||||
case /* google.protobuf.Int64Value id_filter */ 4:
|
||||
message.idFilter = wrappers_1.Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* google.protobuf.StringValue name_filter = 3; */
|
||||
if (message.nameFilter)
|
||||
wrappers_2.StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Int64Value id_filter = 4; */
|
||||
if (message.idFilter)
|
||||
wrappers_1.Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
exports.ListArtifactsRequest = new ListArtifactsRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse", [
|
||||
{ no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => exports.ListArtifactsResponse_MonolithArtifact }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { artifacts: [] };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
|
||||
message.artifacts.push(exports.ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
|
||||
for (let i = 0; i < message.artifacts.length; i++)
|
||||
exports.ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
exports.ListArtifactsResponse = new ListArtifactsResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* int64 database_id */ 3:
|
||||
message.databaseId = reader.int64().toString();
|
||||
break;
|
||||
case /* string name */ 4:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 5:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp created_at */ 6:
|
||||
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* int64 database_id = 3; */
|
||||
if (message.databaseId !== "0")
|
||||
writer.tag(3, runtime_1.WireType.Varint).int64(message.databaseId);
|
||||
/* string name = 4; */
|
||||
if (message.name !== "")
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 5; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(5, runtime_1.WireType.Varint).int64(message.size);
|
||||
/* google.protobuf.Timestamp created_at = 6; */
|
||||
if (message.createdAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
exports.ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
exports.GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
|
||||
{ no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { signedUrl: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string signed_url */ 1:
|
||||
message.signedUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string signed_url = 1; */
|
||||
if (message.signedUrl !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.signedUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
exports.GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
exports.DeleteArtifactRequest = new DeleteArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
exports.DeleteArtifactResponse = new DeleteArtifactResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
|
||||
*/
|
||||
exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.ArtifactService", [
|
||||
{ name: "CreateArtifact", options: {}, I: exports.CreateArtifactRequest, O: exports.CreateArtifactResponse },
|
||||
{ name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
|
||||
{ name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
|
||||
{ name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
|
||||
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
|
||||
]);
|
||||
//# sourceMappingURL=artifact.js.map
|
||||
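The ServiceType instance at the end of this file bundles each method name with its input and output MessageTypes, which is exactly the metadata the Twirp bindings in the next file consume. A small sketch that enumerates it, assuming the runtime-rpc ServiceType shape (typeName plus a methods array):

import { ArtifactService } from "@actions/artifact/lib/generated";

console.log(ArtifactService.typeName); // "github.actions.results.api.v1.ArtifactService"
for (const m of ArtifactService.methods) {
    console.log(`${m.name}: ${m.I.typeName} -> ${m.O.typeName}`);
}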
1
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
48
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.d.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
/// <reference types="node" />
import { TwirpContext, TwirpServer } from "twirp-ts";
import { CreateArtifactRequest, CreateArtifactResponse, FinalizeArtifactRequest, FinalizeArtifactResponse, ListArtifactsRequest, ListArtifactsResponse, GetSignedArtifactURLRequest, GetSignedArtifactURLResponse, DeleteArtifactRequest, DeleteArtifactResponse } from "./artifact";
interface Rpc {
    request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientJSON implements ArtifactServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
    CreateArtifact(ctx: T, request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(ctx: T, request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(ctx: T, request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(ctx: T, request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(ctx: T, request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare enum ArtifactServiceMethod {
    CreateArtifact = "CreateArtifact",
    FinalizeArtifact = "FinalizeArtifact",
    ListArtifacts = "ListArtifacts",
    GetSignedArtifactURL = "GetSignedArtifactURL",
    DeleteArtifact = "DeleteArtifact"
}
export declare const ArtifactServiceMethodList: ArtifactServiceMethod[];
export declare function createArtifactServiceServer<T extends TwirpContext = TwirpContext>(service: ArtifactServiceTwirp<T>): TwirpServer<ArtifactServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
export {};
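The private Rpc interface above is the single seam a caller must supply: it receives the fully-qualified service name, the method name, a content type, and either a JSON-ready object or protobuf bytes. A minimal fetch-backed sketch, assuming a Node 18+ global fetch and a hypothetical baseUrl (the real client in this package layers Actions credentials and retries on top of this seam):

import { ArtifactServiceClientJSON } from "@actions/artifact/lib/generated";

const baseUrl = "https://results.example.githubapp.com/twirp"; // hypothetical endpoint

const rpc = {
    async request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array> {
        // Twirp routes are POST {baseUrl}/{package.Service}/{Method}.
        const res = await fetch(`${baseUrl}/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType },
            body: contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array),
        });
        if (!res.ok) throw new Error(`Twirp call failed: ${res.status}`);
        return contentType === "application/json"
            ? await res.json()
            : new Uint8Array(await res.arrayBuffer());
    },
};

const client = new ArtifactServiceClientJSON(rpc);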
508
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js
generated
vendored
Normal file
@@ -0,0 +1,508 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createArtifactServiceServer = exports.ArtifactServiceMethodList = exports.ArtifactServiceMethod = exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
|
||||
const twirp_ts_1 = require("twirp-ts");
|
||||
const artifact_1 = require("./artifact");
|
||||
class ArtifactServiceClientJSON {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(request) {
|
||||
const data = artifact_1.CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
FinalizeArtifact(request) {
|
||||
const data = artifact_1.FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
ListArtifacts(request) {
|
||||
const data = artifact_1.ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/json", data);
|
||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromJson(data, { ignoreUnknownFields: true }));
|
||||
}
|
||||
GetSignedArtifactURL(request) {
|
||||
const data = artifact_1.GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/json", data);
|
||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
DeleteArtifact(request) {
|
||||
const data = artifact_1.DeleteArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
}
|
||||
exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
|
||||
class ArtifactServiceClientProtobuf {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(request) {
|
||||
const data = artifact_1.CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromBinary(data));
|
||||
}
|
||||
FinalizeArtifact(request) {
|
||||
const data = artifact_1.FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromBinary(data));
|
||||
}
|
||||
ListArtifacts(request) {
|
||||
const data = artifact_1.ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromBinary(data));
|
||||
}
|
||||
GetSignedArtifactURL(request) {
|
||||
const data = artifact_1.GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
|
||||
}
|
||||
DeleteArtifact(request) {
|
||||
const data = artifact_1.DeleteArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
|
||||
}
|
||||
}
|
||||
exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
|
||||
var ArtifactServiceMethod;
|
||||
(function (ArtifactServiceMethod) {
|
||||
ArtifactServiceMethod["CreateArtifact"] = "CreateArtifact";
|
||||
ArtifactServiceMethod["FinalizeArtifact"] = "FinalizeArtifact";
|
||||
ArtifactServiceMethod["ListArtifacts"] = "ListArtifacts";
|
||||
ArtifactServiceMethod["GetSignedArtifactURL"] = "GetSignedArtifactURL";
|
||||
ArtifactServiceMethod["DeleteArtifact"] = "DeleteArtifact";
|
||||
})(ArtifactServiceMethod || (exports.ArtifactServiceMethod = ArtifactServiceMethod = {}));
|
||||
exports.ArtifactServiceMethodList = [
|
||||
ArtifactServiceMethod.CreateArtifact,
|
||||
ArtifactServiceMethod.FinalizeArtifact,
|
||||
ArtifactServiceMethod.ListArtifacts,
|
||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
||||
ArtifactServiceMethod.DeleteArtifact,
|
||||
];
|
||||
function createArtifactServiceServer(service) {
|
||||
return new twirp_ts_1.TwirpServer({
|
||||
service,
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "ArtifactService",
|
||||
methodList: exports.ArtifactServiceMethodList,
|
||||
matchRoute: matchArtifactServiceRoute,
|
||||
});
|
||||
}
|
||||
exports.createArtifactServiceServer = createArtifactServiceServer;
|
||||
function matchArtifactServiceRoute(method, events) {
|
||||
switch (method) {
|
||||
case "CreateArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "FinalizeArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "ListArtifacts":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListArtifacts" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "GetSignedArtifactURL":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetSignedArtifactURL" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "DeleteArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
default:
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.CreateArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.CreateArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.FinalizeArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.FinalizeArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.ListArtifactsRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListArtifacts(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.ListArtifactsResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.GetSignedArtifactURLRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetSignedArtifactURL(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.GetSignedArtifactURLResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.DeleteArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.DeleteArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.CreateArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.CreateArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.FinalizeArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.FinalizeArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.ListArtifactsRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListArtifacts(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.ListArtifactsResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.GetSignedArtifactURLRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetSignedArtifactURL(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.GetSignedArtifactURLResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.DeleteArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.DeleteArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=artifact.twirp.js.map
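Editor's note: createArtifactServiceServer above packages the route matcher and per-method handlers into a twirp-ts TwirpServer. A hedged hosting sketch (not part of this diff; httpHandler() follows the twirp-ts README, and the stub handlers are placeholders):

import * as http from "http";
import { createArtifactServiceServer } from "./artifact.twirp";

// Placeholder handler shared by all five methods for brevity.
const unimplemented = async (): Promise<never> => {
    throw new Error("not implemented");
};

const server = createArtifactServiceServer({
    CreateArtifact: unimplemented,
    FinalizeArtifact: unimplemented,
    ListArtifacts: unimplemented,
    GetSignedArtifactURL: unimplemented,
    DeleteArtifact: unimplemented
});

// Both application/json and application/protobuf requests are dispatched
// through the content-type switch in the generated handlers.
http.createServer(server.httpHandler()).listen(8080);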
1
node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
41
node_modules/@actions/artifact/lib/internal/artifact-client.d.ts
generated
vendored
@@ -1,41 +0,0 @@
import { UploadResponse } from './upload-response';
import { UploadOptions } from './upload-options';
import { DownloadOptions } from './download-options';
import { DownloadResponse } from './download-response';
export interface ArtifactClient {
    /**
     * Uploads an artifact
     *
     * @param name the name of the artifact, required
     * @param files a list of absolute or relative paths that denote what files should be uploaded
     * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded
     * @param options extra options for customizing the upload behavior
     * @returns single UploadInfo object
     */
    uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadOptions): Promise<UploadResponse>;
    /**
     * Downloads a single artifact associated with a run
     *
     * @param name the name of the artifact being downloaded
     * @param path optional path that denotes where the artifact will be downloaded to
     * @param options extra options that allow for the customization of the download behavior
     */
    downloadArtifact(name: string, path?: string, options?: DownloadOptions): Promise<DownloadResponse>;
    /**
     * Downloads all artifacts associated with a run. Because there are multiple artifacts being downloaded, a folder will be created for each one in the specified or default directory
     * @param path optional path that denotes where the artifacts will be downloaded to
     */
    downloadAllArtifacts(path?: string): Promise<DownloadResponse[]>;
}
export declare class DefaultArtifactClient implements ArtifactClient {
    /**
     * Constructs a DefaultArtifactClient
     */
    static create(): DefaultArtifactClient;
    /**
     * Uploads an artifact
     */
    uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadOptions | undefined): Promise<UploadResponse>;
    downloadArtifact(name: string, path?: string | undefined, options?: DownloadOptions | undefined): Promise<DownloadResponse>;
    downloadAllArtifacts(path?: string | undefined): Promise<DownloadResponse[]>;
}
182
node_modules/@actions/artifact/lib/internal/artifact-client.js
generated
vendored
@@ -1,182 +0,0 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DefaultArtifactClient = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const upload_specification_1 = require("./upload-specification");
|
||||
const upload_http_client_1 = require("./upload-http-client");
|
||||
const utils_1 = require("./utils");
|
||||
const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation");
|
||||
const download_http_client_1 = require("./download-http-client");
|
||||
const download_specification_1 = require("./download-specification");
|
||||
const config_variables_1 = require("./config-variables");
|
||||
const path_1 = require("path");
|
||||
class DefaultArtifactClient {
|
||||
/**
|
||||
* Constructs a DefaultArtifactClient
|
||||
*/
|
||||
static create() {
|
||||
return new DefaultArtifactClient();
|
||||
}
|
||||
/**
|
||||
* Uploads an artifact
|
||||
*/
|
||||
uploadArtifact(name, files, rootDirectory, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.info(`Starting artifact upload
|
||||
For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`);
|
||||
(0, path_and_artifact_name_validation_1.checkArtifactName)(name);
|
||||
// Get specification for the files being uploaded
|
||||
const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files);
|
||||
const uploadResponse = {
|
||||
artifactName: name,
|
||||
artifactItems: [],
|
||||
size: 0,
|
||||
failedItems: []
|
||||
};
|
||||
const uploadHttpClient = new upload_http_client_1.UploadHttpClient();
|
||||
if (uploadSpecification.length === 0) {
|
||||
core.warning(`No files found that can be uploaded`);
|
||||
}
|
||||
else {
|
||||
// Create an entry for the artifact in the file container
|
||||
const response = yield uploadHttpClient.createArtifactInFileContainer(name, options);
|
||||
if (!response.fileContainerResourceUrl) {
|
||||
core.debug(response.toString());
|
||||
throw new Error('No URL provided by the Artifact Service to upload an artifact to');
|
||||
}
|
||||
core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
|
||||
core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`);
|
||||
// Upload each of the files that were found concurrently
|
||||
const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
|
||||
// Update the size of the artifact to indicate we are done uploading
|
||||
// The uncompressed size is used for display when downloading a zip of the artifact from the UI
|
||||
core.info(`File upload process has finished. Finalizing the artifact upload`);
|
||||
yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
|
||||
if (uploadResult.failedItems.length > 0) {
|
||||
core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`);
|
||||
}
|
||||
else {
|
||||
core.info(`Artifact has been finalized. All files have been successfully uploaded!`);
|
||||
}
|
||||
core.info(`
|
||||
The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
|
||||
The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
|
||||
|
||||
Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`);
|
||||
uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
|
||||
uploadResponse.size = uploadResult.uploadSize;
|
||||
uploadResponse.failedItems = uploadResult.failedItems;
|
||||
}
|
||||
return uploadResponse;
|
||||
});
|
||||
}
|
||||
downloadArtifact(name, path, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
|
||||
const artifacts = yield downloadHttpClient.listArtifacts();
|
||||
if (artifacts.count === 0) {
|
||||
throw new Error(`Unable to find any artifacts for the associated workflow`);
|
||||
}
|
||||
const artifactToDownload = artifacts.value.find(artifact => {
|
||||
return artifact.name === name;
|
||||
});
|
||||
if (!artifactToDownload) {
|
||||
throw new Error(`Unable to find an artifact with the name: ${name}`);
|
||||
}
|
||||
const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
|
||||
if (!path) {
|
||||
path = (0, config_variables_1.getWorkSpaceDirectory)();
|
||||
}
|
||||
path = (0, path_1.normalize)(path);
|
||||
path = (0, path_1.resolve)(path);
|
||||
// During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
|
||||
const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
|
||||
if (downloadSpecification.filesToDownload.length === 0) {
|
||||
core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
|
||||
}
|
||||
else {
|
||||
// Create all necessary directories recursively before starting any download
|
||||
yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure);
|
||||
core.info('Directory structure has been set up for the artifact');
|
||||
yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate);
|
||||
yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
|
||||
}
|
||||
return {
|
||||
artifactName: name,
|
||||
downloadPath: downloadSpecification.rootDownloadLocation
|
||||
};
|
||||
});
|
||||
}
|
||||
downloadAllArtifacts(path) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
|
||||
const response = [];
|
||||
const artifacts = yield downloadHttpClient.listArtifacts();
|
||||
if (artifacts.count === 0) {
|
||||
core.info('Unable to find any artifacts for the associated workflow');
|
||||
return response;
|
||||
}
|
||||
if (!path) {
|
||||
path = (0, config_variables_1.getWorkSpaceDirectory)();
|
||||
}
|
||||
path = (0, path_1.normalize)(path);
|
||||
path = (0, path_1.resolve)(path);
|
||||
let downloadedArtifacts = 0;
|
||||
while (downloadedArtifacts < artifacts.count) {
|
||||
const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
|
||||
downloadedArtifacts += 1;
|
||||
core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`);
|
||||
// Get container entries for the specific artifact
|
||||
const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
|
||||
const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path, true);
|
||||
if (downloadSpecification.filesToDownload.length === 0) {
|
||||
core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
|
||||
}
|
||||
else {
|
||||
yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure);
|
||||
yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate);
|
||||
yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
|
||||
}
|
||||
response.push({
|
||||
artifactName: currentArtifactToDownload.name,
|
||||
downloadPath: downloadSpecification.rootDownloadLocation
|
||||
});
|
||||
}
|
||||
return response;
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DefaultArtifactClient = DefaultArtifactClient;
|
||||
//# sourceMappingURL=artifact-client.js.map
1
node_modules/@actions/artifact/lib/internal/artifact-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
72
node_modules/@actions/artifact/lib/internal/client.d.ts
generated
vendored
Normal file
@@ -0,0 +1,72 @@
import { UploadArtifactOptions, UploadArtifactResponse, DownloadArtifactOptions, GetArtifactResponse, ListArtifactsOptions, ListArtifactsResponse, DownloadArtifactResponse, FindOptions, DeleteArtifactResponse } from './shared/interfaces';
/**
 * Generic interface for the artifact client.
 */
export interface ArtifactClient {
    /**
     * Uploads an artifact.
     *
     * @param name The name of the artifact, required
     * @param files A list of absolute or relative paths that denote what files should be uploaded
     * @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
     * @param options Extra options for customizing the upload behavior
     * @returns single UploadArtifactResponse object
     */
    uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): Promise<UploadArtifactResponse>;
    /**
     * Lists all artifacts that are part of the current workflow run.
     * This function will return at most 1000 artifacts per workflow run.
     *
     * If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
     * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
     *
     * @param options Extra options that allow for the customization of the list behavior
     * @returns ListArtifactResponse object
     */
    listArtifacts(options?: ListArtifactsOptions & FindOptions): Promise<ListArtifactsResponse>;
    /**
     * Finds an artifact by name.
     * If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
     * If the artifact is not found, it will throw.
     *
     * If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
     * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
     * `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
     * It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1,v2 and v3), @actions/artifact < v2 or it is a rerun.
     * If there are multiple artifacts with the same name in the same workflow run this function will return the first artifact that matches the name.
     *
     * @param artifactName The name of the artifact to find
     * @param options Extra options that allow for the customization of the get behavior
     */
    getArtifact(artifactName: string, options?: FindOptions): Promise<GetArtifactResponse>;
    /**
     * Downloads an artifact and unzips the content.
     *
     * If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
     *
     * @param artifactId The id of the artifact to download
     * @param options Extra options that allow for the customization of the download behavior
     * @returns single DownloadArtifactResponse object
     */
    downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): Promise<DownloadArtifactResponse>;
    /**
     * Delete an Artifact
     *
     * If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
     *
     * @param artifactName The name of the artifact to delete
     * @param options Extra options that allow for the customization of the delete behavior
     * @returns single DeleteArtifactResponse object
     */
    deleteArtifact(artifactName: string, options?: FindOptions): Promise<DeleteArtifactResponse>;
}
/**
 * The default artifact client that is used by the artifact action(s).
 */
export declare class DefaultArtifactClient implements ArtifactClient {
    uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): Promise<UploadArtifactResponse>;
    downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): Promise<DownloadArtifactResponse>;
    listArtifacts(options?: ListArtifactsOptions & FindOptions): Promise<ListArtifactsResponse>;
    getArtifact(artifactName: string, options?: FindOptions): Promise<GetArtifactResponse>;
    deleteArtifact(artifactName: string, options?: FindOptions): Promise<DeleteArtifactResponse>;
}
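Editor's note: a short usage sketch of the v2 client declared above (not part of this diff; the artifact name, file paths, and options are made up, and running it requires the Actions runtime environment variables):

import { DefaultArtifactClient } from "@actions/artifact";

async function example(): Promise<void> {
    const artifact = new DefaultArtifactClient();

    // Upload two files rooted at "out".
    const upload = await artifact.uploadArtifact(
        "my-artifact",
        ["out/results.sarif", "out/debug.log"],
        "out",
        { retentionDays: 7 }
    );
    console.log(`uploaded artifact ${upload.id} (${upload.size} bytes)`);

    // Note the v2 shape: download is by numeric id, so resolve the name first.
    const { artifact: found } = await artifact.getArtifact("my-artifact");
    await artifact.downloadArtifact(found.id, { path: "restored" });
}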
144
node_modules/@actions/artifact/lib/internal/client.js
generated
vendored
Normal file
@@ -0,0 +1,144 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DefaultArtifactClient = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const config_1 = require("./shared/config");
|
||||
const upload_artifact_1 = require("./upload/upload-artifact");
|
||||
const download_artifact_1 = require("./download/download-artifact");
|
||||
const delete_artifact_1 = require("./delete/delete-artifact");
|
||||
const get_artifact_1 = require("./find/get-artifact");
|
||||
const list_artifacts_1 = require("./find/list-artifacts");
|
||||
const errors_1 = require("./shared/errors");
|
||||
/**
|
||||
* The default artifact client that is used by the artifact action(s).
|
||||
*/
|
||||
class DefaultArtifactClient {
|
||||
uploadArtifact(name, files, rootDirectory, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
return (0, upload_artifact_1.uploadArtifact)(name, files, rootDirectory, options);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Artifact upload failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
downloadArtifact(artifactId, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { repositoryOwner, repositoryName, token } } = options, downloadOptions = __rest(options, ["findBy"]);
|
||||
return (0, download_artifact_1.downloadArtifactPublic)(artifactId, repositoryOwner, repositoryName, token, downloadOptions);
|
||||
}
|
||||
return (0, download_artifact_1.downloadArtifactInternal)(artifactId, options);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Download Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
listArtifacts(options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
|
||||
return (0, list_artifacts_1.listArtifactsPublic)(workflowRunId, repositoryOwner, repositoryName, token, options === null || options === void 0 ? void 0 : options.latest);
|
||||
}
|
||||
return (0, list_artifacts_1.listArtifactsInternal)(options === null || options === void 0 ? void 0 : options.latest);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Listing Artifacts failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
getArtifact(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
|
||||
return (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
|
||||
}
|
||||
return (0, get_artifact_1.getArtifactInternal)(artifactName);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Get Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
deleteArtifact(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { repositoryOwner, repositoryName, workflowRunId, token } } = options;
|
||||
return (0, delete_artifact_1.deleteArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
|
||||
}
|
||||
return (0, delete_artifact_1.deleteArtifactInternal)(artifactName);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Delete Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DefaultArtifactClient = DefaultArtifactClient;
|
||||
//# sourceMappingURL=client.js.map
1
node_modules/@actions/artifact/lib/internal/client.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
12
node_modules/@actions/artifact/lib/internal/config-variables.d.ts
generated
vendored
@@ -1,12 +0,0 @@
export declare function getUploadFileConcurrency(): number;
export declare function getUploadChunkSize(): number;
export declare function getRetryLimit(): number;
export declare function getRetryMultiplier(): number;
export declare function getInitialRetryIntervalInMilliseconds(): number;
export declare function getDownloadFileConcurrency(): number;
export declare function getRuntimeToken(): string;
export declare function getRuntimeUrl(): string;
export declare function getWorkFlowRunId(): string;
export declare function getWorkSpaceDirectory(): string;
export declare function getRetentionDays(): string | undefined;
export declare function isGhes(): boolean;
|
||||
77
node_modules/@actions/artifact/lib/internal/config-variables.js
generated
vendored
@@ -1,77 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGhes = exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0;
// The number of concurrent uploads that happens at the same time
function getUploadFileConcurrency() {
    return 2;
}
exports.getUploadFileConcurrency = getUploadFileConcurrency;
// When uploading large files that can't be uploaded with a single http call, this controls
// the chunk size that is used during upload
function getUploadChunkSize() {
    return 8 * 1024 * 1024; // 8 MB Chunks
}
exports.getUploadChunkSize = getUploadChunkSize;
// The maximum number of retries that can be attempted before an upload or download fails
function getRetryLimit() {
    return 5;
}
exports.getRetryLimit = getRetryLimit;
// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backOff time increases depending on the number of retries
function getRetryMultiplier() {
    return 1.5;
}
exports.getRetryMultiplier = getRetryMultiplier;
// The initial wait time if an upload or download fails and a retry is being attempted for the first time
function getInitialRetryIntervalInMilliseconds() {
    return 3000;
}
exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds;
// The number of concurrent downloads that happens at the same time
function getDownloadFileConcurrency() {
    return 2;
}
exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
function getRuntimeToken() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!token) {
        throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
    }
    return token;
}
exports.getRuntimeToken = getRuntimeToken;
function getRuntimeUrl() {
    const runtimeUrl = process.env['ACTIONS_RUNTIME_URL'];
    if (!runtimeUrl) {
        throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
    }
    return runtimeUrl;
}
exports.getRuntimeUrl = getRuntimeUrl;
function getWorkFlowRunId() {
    const workFlowRunId = process.env['GITHUB_RUN_ID'];
    if (!workFlowRunId) {
        throw new Error('Unable to get GITHUB_RUN_ID env variable');
    }
    return workFlowRunId;
}
exports.getWorkFlowRunId = getWorkFlowRunId;
function getWorkSpaceDirectory() {
    const workspaceDirectory = process.env['GITHUB_WORKSPACE'];
    if (!workspaceDirectory) {
        throw new Error('Unable to get GITHUB_WORKSPACE env variable');
    }
    return workspaceDirectory;
}
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
function getRetentionDays() {
    return process.env['GITHUB_RETENTION_DAYS'];
}
exports.getRetentionDays = getRetentionDays;
function isGhes() {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
//# sourceMappingURL=config-variables.js.map
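Aside, not part of the diff: the retry knobs in the removed file above combine into the exponential backoff used by the upload and download clients. A minimal sketch of how a delay can be derived from them; the exact formula in the package's utils may differ (it may add jitter), so treat this as an illustrative assumption:

// Illustrative only: exponential backoff delay derived from the knobs above.
// retryCount = 1 waits ~3000 ms, retryCount = 2 waits ~4500 ms, and so on.
function backoffDelayMs(retryCount: number): number {
  const initialMs = 3000 // getInitialRetryIntervalInMilliseconds()
  const multiplier = 1.5 // getRetryMultiplier()
  return initialMs * Math.pow(multiplier, retryCount - 1)
}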
1
node_modules/@actions/artifact/lib/internal/config-variables.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"config-variables.js","sourceRoot":"","sources":["../../src/internal/config-variables.ts"],"names":[],"mappings":";;;AAAA,iEAAiE;AACjE,SAAgB,wBAAwB;IACtC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,4DAEC;AAED,2FAA2F;AAC3F,4CAA4C;AAC5C,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,yFAAyF;AACzF,SAAgB,aAAa;IAC3B,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,sCAEC;AAED,wGAAwG;AACxG,0GAA0G;AAC1G,SAAgB,kBAAkB;IAChC,OAAO,GAAG,CAAA;AACZ,CAAC;AAFD,gDAEC;AAED,yGAAyG;AACzG,SAAgB,qCAAqC;IACnD,OAAO,IAAI,CAAA;AACb,CAAC;AAFD,sFAEC;AAED,mEAAmE;AACnE,SAAgB,0BAA0B;IACxC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,gEAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;KACpE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,aAAa;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;KAClE;IACD,OAAO,UAAU,CAAA;AACnB,CAAC;AAND,sCAMC;AAED,SAAgB,gBAAgB;IAC9B,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA;IAClD,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IACD,OAAO,aAAa,CAAA;AACtB,CAAC;AAND,4CAMC;AAED,SAAgB,qBAAqB;IACnC,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAC1D,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAA;KAC/D;IACD,OAAO,kBAAkB,CAAA;AAC3B,CAAC;AAND,sDAMC;AAED,SAAgB,gBAAgB;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;AAC7C,CAAC;AAFD,4CAEC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IACD,OAAO,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,YAAY,CAAA;AACtD,CAAC;AALD,wBAKC"}
67
node_modules/@actions/artifact/lib/internal/contracts.d.ts
generated
vendored
@@ -1,67 +0,0 @@
export interface ArtifactResponse {
    containerId: string;
    size: number;
    signedContent: string;
    fileContainerResourceUrl: string;
    type: string;
    name: string;
    url: string;
}
export interface CreateArtifactParameters {
    Type: string;
    Name: string;
    RetentionDays?: number;
}
export interface PatchArtifactSize {
    Size: number;
}
export interface PatchArtifactSizeSuccessResponse {
    containerId: number;
    size: number;
    signedContent: string;
    type: string;
    name: string;
    url: string;
    uploadUrl: string;
}
export interface UploadResults {
    /**
     * The size in bytes of data that was transferred during the upload process to the actions backend service. This takes into account possible
     * gzip compression to reduce the amount of data that needs to be transferred
     */
    uploadSize: number;
    /**
     * The raw size of the files that were specified for upload
     */
    totalSize: number;
    /**
     * An array of files that failed to upload
     */
    failedItems: string[];
}
export interface ListArtifactsResponse {
    count: number;
    value: ArtifactResponse[];
}
export interface QueryArtifactResponse {
    count: number;
    value: ContainerEntry[];
}
export interface ContainerEntry {
    containerId: number;
    scopeIdentifier: string;
    path: string;
    itemType: string;
    status: string;
    fileLength?: number;
    fileEncoding?: number;
    fileType?: number;
    dateCreated: string;
    dateLastModified: string;
    createdBy: string;
    lastModifiedBy: string;
    itemLocation: string;
    contentLocation: string;
    fileId?: number;
    contentId: string;
}
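Aside, not part of the diff: a literal conforming to the UploadResults interface above, illustrating the gzip accounting it describes (the byte counts are invented):

import { UploadResults } from './contracts'

// uploadSize can be smaller than totalSize because file contents may be
// gzip-compressed before transfer.
const results: UploadResults = {
  uploadSize: 10240, // bytes actually sent over the wire
  totalSize: 52431,  // raw size of the files on disk
  failedItems: []
}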
1
node_modules/@actions/artifact/lib/internal/contracts.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"contracts.js","sourceRoot":"","sources":["../../src/internal/contracts.ts"],"names":[],"mappings":""}
21
node_modules/@actions/artifact/lib/internal/crc64.d.ts
generated
vendored
@@ -1,21 +0,0 @@
/**
 * CRC64: cyclic redundancy check, 64-bits
 *
 * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
 * validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
 * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
 *
 * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
 * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
 */
/// <reference types="node" />
export type CRC64DigestEncoding = 'hex' | 'base64' | 'buffer';
declare class CRC64 {
    private _crc;
    constructor();
    update(data: Buffer | string): void;
    digest(encoding?: CRC64DigestEncoding): string | Buffer;
    private toBuffer;
    static flip64Bits(n: bigint): bigint;
}
export default CRC64;
303
node_modules/@actions/artifact/lib/internal/crc64.js
generated
vendored
@@ -1,303 +0,0 @@
"use strict";
/**
 * CRC64: cyclic redundancy check, 64-bits
 *
 * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
 * validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
 * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
 *
 * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
 * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
 */
Object.defineProperty(exports, "__esModule", { value: true });
// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737)
const PREGEN_POLY_TABLE = [
    BigInt('0x0000000000000000'),
    BigInt('0x7F6EF0C830358979'),
    BigInt('0xFEDDE190606B12F2'),
    BigInt('0x81B31158505E9B8B'),
    BigInt('0xC962E5739841B68F'),
    BigInt('0xB60C15BBA8743FF6'),
    BigInt('0x37BF04E3F82AA47D'),
    BigInt('0x48D1F42BC81F2D04'),
    BigInt('0xA61CECB46814FE75'),
    BigInt('0xD9721C7C5821770C'),
    BigInt('0x58C10D24087FEC87'),
    BigInt('0x27AFFDEC384A65FE'),
    BigInt('0x6F7E09C7F05548FA'),
    BigInt('0x1010F90FC060C183'),
    BigInt('0x91A3E857903E5A08'),
    BigInt('0xEECD189FA00BD371'),
    BigInt('0x78E0FF3B88BE6F81'),
    BigInt('0x078E0FF3B88BE6F8'),
    BigInt('0x863D1EABE8D57D73'),
    BigInt('0xF953EE63D8E0F40A'),
    BigInt('0xB1821A4810FFD90E'),
    BigInt('0xCEECEA8020CA5077'),
    BigInt('0x4F5FFBD87094CBFC'),
    BigInt('0x30310B1040A14285'),
    BigInt('0xDEFC138FE0AA91F4'),
    BigInt('0xA192E347D09F188D'),
    BigInt('0x2021F21F80C18306'),
    BigInt('0x5F4F02D7B0F40A7F'),
    BigInt('0x179EF6FC78EB277B'),
    BigInt('0x68F0063448DEAE02'),
    BigInt('0xE943176C18803589'),
    BigInt('0x962DE7A428B5BCF0'),
    BigInt('0xF1C1FE77117CDF02'),
    BigInt('0x8EAF0EBF2149567B'),
    BigInt('0x0F1C1FE77117CDF0'),
    BigInt('0x7072EF2F41224489'),
    BigInt('0x38A31B04893D698D'),
    BigInt('0x47CDEBCCB908E0F4'),
    BigInt('0xC67EFA94E9567B7F'),
    BigInt('0xB9100A5CD963F206'),
    BigInt('0x57DD12C379682177'),
    BigInt('0x28B3E20B495DA80E'),
    BigInt('0xA900F35319033385'),
    BigInt('0xD66E039B2936BAFC'),
    BigInt('0x9EBFF7B0E12997F8'),
    BigInt('0xE1D10778D11C1E81'),
    BigInt('0x606216208142850A'),
    BigInt('0x1F0CE6E8B1770C73'),
    BigInt('0x8921014C99C2B083'),
    BigInt('0xF64FF184A9F739FA'),
    BigInt('0x77FCE0DCF9A9A271'),
    BigInt('0x08921014C99C2B08'),
    BigInt('0x4043E43F0183060C'),
    BigInt('0x3F2D14F731B68F75'),
    BigInt('0xBE9E05AF61E814FE'),
    BigInt('0xC1F0F56751DD9D87'),
    BigInt('0x2F3DEDF8F1D64EF6'),
    BigInt('0x50531D30C1E3C78F'),
    BigInt('0xD1E00C6891BD5C04'),
    BigInt('0xAE8EFCA0A188D57D'),
    BigInt('0xE65F088B6997F879'),
    BigInt('0x9931F84359A27100'),
    BigInt('0x1882E91B09FCEA8B'),
    BigInt('0x67EC19D339C963F2'),
    BigInt('0xD75ADABD7A6E2D6F'),
    BigInt('0xA8342A754A5BA416'),
    BigInt('0x29873B2D1A053F9D'),
    BigInt('0x56E9CBE52A30B6E4'),
    BigInt('0x1E383FCEE22F9BE0'),
    BigInt('0x6156CF06D21A1299'),
    BigInt('0xE0E5DE5E82448912'),
    BigInt('0x9F8B2E96B271006B'),
    BigInt('0x71463609127AD31A'),
    BigInt('0x0E28C6C1224F5A63'),
    BigInt('0x8F9BD7997211C1E8'),
    BigInt('0xF0F5275142244891'),
    BigInt('0xB824D37A8A3B6595'),
    BigInt('0xC74A23B2BA0EECEC'),
    BigInt('0x46F932EAEA507767'),
    BigInt('0x3997C222DA65FE1E'),
    BigInt('0xAFBA2586F2D042EE'),
    BigInt('0xD0D4D54EC2E5CB97'),
    BigInt('0x5167C41692BB501C'),
    BigInt('0x2E0934DEA28ED965'),
    BigInt('0x66D8C0F56A91F461'),
    BigInt('0x19B6303D5AA47D18'),
    BigInt('0x980521650AFAE693'),
    BigInt('0xE76BD1AD3ACF6FEA'),
    BigInt('0x09A6C9329AC4BC9B'),
    BigInt('0x76C839FAAAF135E2'),
    BigInt('0xF77B28A2FAAFAE69'),
    BigInt('0x8815D86ACA9A2710'),
    BigInt('0xC0C42C4102850A14'),
    BigInt('0xBFAADC8932B0836D'),
    BigInt('0x3E19CDD162EE18E6'),
    BigInt('0x41773D1952DB919F'),
    BigInt('0x269B24CA6B12F26D'),
    BigInt('0x59F5D4025B277B14'),
    BigInt('0xD846C55A0B79E09F'),
    BigInt('0xA72835923B4C69E6'),
    BigInt('0xEFF9C1B9F35344E2'),
    BigInt('0x90973171C366CD9B'),
    BigInt('0x1124202993385610'),
    BigInt('0x6E4AD0E1A30DDF69'),
    BigInt('0x8087C87E03060C18'),
    BigInt('0xFFE938B633338561'),
    BigInt('0x7E5A29EE636D1EEA'),
    BigInt('0x0134D92653589793'),
    BigInt('0x49E52D0D9B47BA97'),
    BigInt('0x368BDDC5AB7233EE'),
    BigInt('0xB738CC9DFB2CA865'),
    BigInt('0xC8563C55CB19211C'),
    BigInt('0x5E7BDBF1E3AC9DEC'),
    BigInt('0x21152B39D3991495'),
    BigInt('0xA0A63A6183C78F1E'),
    BigInt('0xDFC8CAA9B3F20667'),
    BigInt('0x97193E827BED2B63'),
    BigInt('0xE877CE4A4BD8A21A'),
    BigInt('0x69C4DF121B863991'),
    BigInt('0x16AA2FDA2BB3B0E8'),
    BigInt('0xF86737458BB86399'),
    BigInt('0x8709C78DBB8DEAE0'),
    BigInt('0x06BAD6D5EBD3716B'),
    BigInt('0x79D4261DDBE6F812'),
    BigInt('0x3105D23613F9D516'),
    BigInt('0x4E6B22FE23CC5C6F'),
    BigInt('0xCFD833A67392C7E4'),
    BigInt('0xB0B6C36E43A74E9D'),
    BigInt('0x9A6C9329AC4BC9B5'),
    BigInt('0xE50263E19C7E40CC'),
    BigInt('0x64B172B9CC20DB47'),
    BigInt('0x1BDF8271FC15523E'),
    BigInt('0x530E765A340A7F3A'),
    BigInt('0x2C608692043FF643'),
    BigInt('0xADD397CA54616DC8'),
    BigInt('0xD2BD67026454E4B1'),
    BigInt('0x3C707F9DC45F37C0'),
    BigInt('0x431E8F55F46ABEB9'),
    BigInt('0xC2AD9E0DA4342532'),
    BigInt('0xBDC36EC59401AC4B'),
    BigInt('0xF5129AEE5C1E814F'),
    BigInt('0x8A7C6A266C2B0836'),
    BigInt('0x0BCF7B7E3C7593BD'),
    BigInt('0x74A18BB60C401AC4'),
    BigInt('0xE28C6C1224F5A634'),
    BigInt('0x9DE29CDA14C02F4D'),
    BigInt('0x1C518D82449EB4C6'),
    BigInt('0x633F7D4A74AB3DBF'),
    BigInt('0x2BEE8961BCB410BB'),
    BigInt('0x548079A98C8199C2'),
    BigInt('0xD53368F1DCDF0249'),
    BigInt('0xAA5D9839ECEA8B30'),
    BigInt('0x449080A64CE15841'),
    BigInt('0x3BFE706E7CD4D138'),
    BigInt('0xBA4D61362C8A4AB3'),
    BigInt('0xC52391FE1CBFC3CA'),
    BigInt('0x8DF265D5D4A0EECE'),
    BigInt('0xF29C951DE49567B7'),
    BigInt('0x732F8445B4CBFC3C'),
    BigInt('0x0C41748D84FE7545'),
    BigInt('0x6BAD6D5EBD3716B7'),
    BigInt('0x14C39D968D029FCE'),
    BigInt('0x95708CCEDD5C0445'),
    BigInt('0xEA1E7C06ED698D3C'),
    BigInt('0xA2CF882D2576A038'),
    BigInt('0xDDA178E515432941'),
    BigInt('0x5C1269BD451DB2CA'),
    BigInt('0x237C997575283BB3'),
    BigInt('0xCDB181EAD523E8C2'),
    BigInt('0xB2DF7122E51661BB'),
    BigInt('0x336C607AB548FA30'),
    BigInt('0x4C0290B2857D7349'),
    BigInt('0x04D364994D625E4D'),
    BigInt('0x7BBD94517D57D734'),
    BigInt('0xFA0E85092D094CBF'),
    BigInt('0x856075C11D3CC5C6'),
    BigInt('0x134D926535897936'),
    BigInt('0x6C2362AD05BCF04F'),
    BigInt('0xED9073F555E26BC4'),
    BigInt('0x92FE833D65D7E2BD'),
    BigInt('0xDA2F7716ADC8CFB9'),
    BigInt('0xA54187DE9DFD46C0'),
    BigInt('0x24F29686CDA3DD4B'),
    BigInt('0x5B9C664EFD965432'),
    BigInt('0xB5517ED15D9D8743'),
    BigInt('0xCA3F8E196DA80E3A'),
    BigInt('0x4B8C9F413DF695B1'),
    BigInt('0x34E26F890DC31CC8'),
    BigInt('0x7C339BA2C5DC31CC'),
    BigInt('0x035D6B6AF5E9B8B5'),
    BigInt('0x82EE7A32A5B7233E'),
    BigInt('0xFD808AFA9582AA47'),
    BigInt('0x4D364994D625E4DA'),
    BigInt('0x3258B95CE6106DA3'),
    BigInt('0xB3EBA804B64EF628'),
    BigInt('0xCC8558CC867B7F51'),
    BigInt('0x8454ACE74E645255'),
    BigInt('0xFB3A5C2F7E51DB2C'),
    BigInt('0x7A894D772E0F40A7'),
    BigInt('0x05E7BDBF1E3AC9DE'),
    BigInt('0xEB2AA520BE311AAF'),
    BigInt('0x944455E88E0493D6'),
    BigInt('0x15F744B0DE5A085D'),
    BigInt('0x6A99B478EE6F8124'),
    BigInt('0x224840532670AC20'),
    BigInt('0x5D26B09B16452559'),
    BigInt('0xDC95A1C3461BBED2'),
    BigInt('0xA3FB510B762E37AB'),
    BigInt('0x35D6B6AF5E9B8B5B'),
    BigInt('0x4AB846676EAE0222'),
    BigInt('0xCB0B573F3EF099A9'),
    BigInt('0xB465A7F70EC510D0'),
    BigInt('0xFCB453DCC6DA3DD4'),
    BigInt('0x83DAA314F6EFB4AD'),
    BigInt('0x0269B24CA6B12F26'),
    BigInt('0x7D0742849684A65F'),
    BigInt('0x93CA5A1B368F752E'),
    BigInt('0xECA4AAD306BAFC57'),
    BigInt('0x6D17BB8B56E467DC'),
    BigInt('0x12794B4366D1EEA5'),
    BigInt('0x5AA8BF68AECEC3A1'),
    BigInt('0x25C64FA09EFB4AD8'),
    BigInt('0xA4755EF8CEA5D153'),
    BigInt('0xDB1BAE30FE90582A'),
    BigInt('0xBCF7B7E3C7593BD8'),
    BigInt('0xC399472BF76CB2A1'),
    BigInt('0x422A5673A732292A'),
    BigInt('0x3D44A6BB9707A053'),
    BigInt('0x759552905F188D57'),
    BigInt('0x0AFBA2586F2D042E'),
    BigInt('0x8B48B3003F739FA5'),
    BigInt('0xF42643C80F4616DC'),
    BigInt('0x1AEB5B57AF4DC5AD'),
    BigInt('0x6585AB9F9F784CD4'),
    BigInt('0xE436BAC7CF26D75F'),
    BigInt('0x9B584A0FFF135E26'),
    BigInt('0xD389BE24370C7322'),
    BigInt('0xACE74EEC0739FA5B'),
    BigInt('0x2D545FB4576761D0'),
    BigInt('0x523AAF7C6752E8A9'),
    BigInt('0xC41748D84FE75459'),
    BigInt('0xBB79B8107FD2DD20'),
    BigInt('0x3ACAA9482F8C46AB'),
    BigInt('0x45A459801FB9CFD2'),
    BigInt('0x0D75ADABD7A6E2D6'),
    BigInt('0x721B5D63E7936BAF'),
    BigInt('0xF3A84C3BB7CDF024'),
    BigInt('0x8CC6BCF387F8795D'),
    BigInt('0x620BA46C27F3AA2C'),
    BigInt('0x1D6554A417C62355'),
    BigInt('0x9CD645FC4798B8DE'),
    BigInt('0xE3B8B53477AD31A7'),
    BigInt('0xAB69411FBFB21CA3'),
    BigInt('0xD407B1D78F8795DA'),
    BigInt('0x55B4A08FDFD90E51'),
    BigInt('0x2ADA5047EFEC8728')
];
class CRC64 {
    constructor() {
        this._crc = BigInt(0);
    }
    update(data) {
        const buffer = typeof data === 'string' ? Buffer.from(data) : data;
        let crc = CRC64.flip64Bits(this._crc);
        for (const dataByte of buffer) {
            const crcByte = Number(crc & BigInt(0xff));
            crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8));
        }
        this._crc = CRC64.flip64Bits(crc);
    }
    digest(encoding) {
        switch (encoding) {
            case 'hex':
                return this._crc.toString(16).toUpperCase();
            case 'base64':
                return this.toBuffer().toString('base64');
            default:
                return this.toBuffer();
        }
    }
    toBuffer() {
        return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff))));
    }
    static flip64Bits(n) {
        return (BigInt(1) << BigInt(64)) - BigInt(1) - n;
    }
}
exports.default = CRC64;
//# sourceMappingURL=crc64.js.map
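Aside, not part of the diff: a minimal sketch of how the removed CRC64 class was driven. update can be called incrementally with strings or Buffers, and digest('hex') returns the checksum of everything fed so far; incremental updates are equivalent to hashing the concatenated input in one call.

import CRC64 from './crc64'

const hasher = new CRC64()
hasher.update('hello ')              // accepts string or Buffer
hasher.update(Buffer.from('world'))
console.log(hasher.digest('hex'))    // checksum of 'hello world'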
1
node_modules/@actions/artifact/lib/internal/crc64.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@actions/artifact/lib/internal/delete/delete-artifact.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
import { DeleteArtifactResponse } from '../shared/interfaces';
export declare function deleteArtifactPublic(artifactName: string, workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string): Promise<DeleteArtifactResponse>;
export declare function deleteArtifactInternal(artifactName: any): Promise<DeleteArtifactResponse>;
83
node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.deleteArtifactInternal = exports.deleteArtifactPublic = void 0;
const core_1 = require("@actions/core");
const github_1 = require("@actions/github");
const user_agent_1 = require("../shared/user-agent");
const retry_options_1 = require("../find/retry-options");
const utils_1 = require("@actions/github/lib/utils");
const plugin_request_log_1 = require("@octokit/plugin-request-log");
const plugin_retry_1 = require("@octokit/plugin-retry");
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
const util_1 = require("../shared/util");
const generated_1 = require("../../generated");
const get_artifact_1 = require("../find/get-artifact");
const errors_1 = require("../shared/errors");
function deleteArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
        const opts = {
            log: undefined,
            userAgent: (0, user_agent_1.getUserAgentString)(),
            previews: undefined,
            retry: retryOpts,
            request: requestOpts
        };
        const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
        const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
        const deleteArtifactResp = yield github.rest.actions.deleteArtifact({
            owner: repositoryOwner,
            repo: repositoryName,
            artifact_id: getArtifactResp.artifact.id
        });
        if (deleteArtifactResp.status !== 204) {
            throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${deleteArtifactResp.status} (${(_a = deleteArtifactResp === null || deleteArtifactResp === void 0 ? void 0 : deleteArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
        }
        return {
            id: getArtifactResp.artifact.id
        };
    });
}
exports.deleteArtifactPublic = deleteArtifactPublic;
function deleteArtifactInternal(artifactName) {
    return __awaiter(this, void 0, void 0, function* () {
        const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
        const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
        const listReq = {
            workflowRunBackendId,
            workflowJobRunBackendId,
            nameFilter: generated_1.StringValue.create({ value: artifactName })
        };
        const listRes = yield artifactClient.ListArtifacts(listReq);
        if (listRes.artifacts.length === 0) {
            throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}`);
        }
        let artifact = listRes.artifacts[0];
        if (listRes.artifacts.length > 1) {
            artifact = listRes.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
            (0, core_1.debug)(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
        }
        const req = {
            workflowRunBackendId: artifact.workflowRunBackendId,
            workflowJobRunBackendId: artifact.workflowJobRunBackendId,
            name: artifact.name
        };
        const res = yield artifactClient.DeleteArtifact(req);
        (0, core_1.info)(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`);
        return {
            id: Number(res.artifactId)
        };
    });
}
exports.deleteArtifactInternal = deleteArtifactInternal;
//# sourceMappingURL=delete-artifact.js.map
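Aside, not part of the diff: a sketch of how these new entry points are normally reached through the v2 client's deleteArtifact method, which dispatches to deleteArtifactPublic when findBy options are supplied and to deleteArtifactInternal otherwise; calling it from inside a workflow step is an assumption here.

import artifactClient from '@actions/artifact'

// Within the current workflow run; no token needed for the internal path.
const { id } = await artifactClient.deleteArtifact('my-artifact')
console.log(`Deleted artifact ${id}`)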
1
node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"delete-artifact.js","sourceRoot":"","sources":["../../../src/internal/delete/delete-artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAyC;AACzC,4CAA0C;AAE1C,qDAAuD;AACvD,yDAAqD;AACrD,qDAA0E;AAC1E,oEAAsD;AACtD,wDAA2C;AAE3C,2EAA2E;AAC3E,yCAAqD;AACrD,+CAIwB;AACxB,uDAAsD;AACtD,6CAA4E;AAE5E,SAAsB,oBAAoB,CACxC,YAAoB,EACpB,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa;;;QAEb,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,MAAM,eAAe,GAAG,MAAM,IAAA,gCAAiB,EAC7C,YAAY,EACZ,aAAa,EACb,eAAe,EACf,cAAc,EACd,KAAK,CACN,CAAA;QAED,MAAM,kBAAkB,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC;YAClE,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,WAAW,EAAE,eAAe,CAAC,QAAQ,CAAC,EAAE;SACzC,CAAC,CAAA;QAEF,IAAI,kBAAkB,CAAC,MAAM,KAAK,GAAG,EAAE;YACrC,MAAM,IAAI,6BAAoB,CAC5B,qCAAqC,kBAAkB,CAAC,MAAM,KAAK,MAAA,kBAAkB,aAAlB,kBAAkB,uBAAlB,kBAAkB,CAAE,OAAO,0CAAG,qBAAqB,CAAC,GAAG,CAC3H,CAAA;SACF;QAED,OAAO;YACL,EAAE,EAAE,eAAe,CAAC,QAAQ,CAAC,EAAE;SAChC,CAAA;;CACF;AA1CD,oDA0CC;AAED,SAAsB,sBAAsB,CAC1C,YAAY;;QAEZ,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,OAAO,GAAyB;YACpC,oBAAoB;YACpB,uBAAuB;YACvB,UAAU,EAAE,uBAAW,CAAC,MAAM,CAAC,EAAC,KAAK,EAAE,YAAY,EAAC,CAAC;SACtD,CAAA;QAED,MAAM,OAAO,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAE3D,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY,EAAE,CAC/C,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnC,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAChC,QAAQ,GAAG,OAAO,CAAC,SAAS,CAAC,IAAI,CAC/B,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CACtD,CAAC,CAAC,CAAC,CAAA;YAEJ,IAAA,YAAK,EACH,yEAAyE,QAAQ,CAAC,UAAU,GAAG,CAChG,CAAA;SACF;QAED,MAAM,GAAG,GAA0B;YACjC,oBAAoB,EAAE,QAAQ,CAAC,oBAAoB;YACnD,uBAAuB,EAAE,QAAQ,CAAC,uBAAuB;YACzD,IAAI,EAAE,QAAQ,CAAC,IAAI;SACpB,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;QACpD,IAAA,WAAI,EAAC,aAAa,YAAY,UAAU,GAAG,CAAC,UAAU,WAAW,CAAC,CAAA;QAElE,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC;SAC3B,CAAA;IACH,CAAC;CAAA;AA7CD,wDA6CC"}
39
node_modules/@actions/artifact/lib/internal/download-http-client.d.ts
generated
vendored
@@ -1,39 +0,0 @@
/// <reference types="node" />
import * as fs from 'fs';
import { ListArtifactsResponse, QueryArtifactResponse } from './contracts';
import { HttpClientResponse } from '@actions/http-client';
import { DownloadItem } from './download-specification';
export declare class DownloadHttpClient {
    private downloadHttpManager;
    private statusReporter;
    constructor();
    /**
     * Gets a list of all artifacts that are in a specific container
     */
    listArtifacts(): Promise<ListArtifactsResponse>;
    /**
     * Fetches a set of container items that describe the contents of an artifact
     * @param artifactName the name of the artifact
     * @param containerUrl the artifact container URL for the run
     */
    getContainerItems(artifactName: string, containerUrl: string): Promise<QueryArtifactResponse>;
    /**
     * Concurrently downloads all the files that are part of an artifact
     * @param downloadItems information about what items to download and where to save them
     */
    downloadSingleArtifact(downloadItems: DownloadItem[]): Promise<void>;
    /**
     * Downloads an individual file
     * @param httpClientIndex the index of the http client that is used to make all of the calls
     * @param artifactLocation origin location where a file will be downloaded from
     * @param downloadPath destination location for the file being downloaded
     */
    private downloadIndividualFile;
    /**
     * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
     * @param response the http response received when downloading a file
     * @param destinationStream the stream where the file should be written to
     * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
     */
    pipeResponseToFile(response: HttpClientResponse, destinationStream: fs.WriteStream, isGzip: boolean): Promise<void>;
}
296
node_modules/@actions/artifact/lib/internal/download-http-client.js
generated
vendored
@@ -1,296 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DownloadHttpClient = void 0;
const fs = __importStar(require("fs"));
const core = __importStar(require("@actions/core"));
const zlib = __importStar(require("zlib"));
const utils_1 = require("./utils");
const url_1 = require("url");
const status_reporter_1 = require("./status-reporter");
const perf_hooks_1 = require("perf_hooks");
const http_manager_1 = require("./http-manager");
const config_variables_1 = require("./config-variables");
const requestUtils_1 = require("./requestUtils");
class DownloadHttpClient {
    constructor() {
        this.downloadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getDownloadFileConcurrency)(), '@actions/artifact-download');
        // downloads are usually significantly faster than uploads so display status information every second
        this.statusReporter = new status_reporter_1.StatusReporter(1000);
    }
    /**
     * Gets a list of all artifacts that are in a specific container
     */
    listArtifacts() {
        return __awaiter(this, void 0, void 0, function* () {
            const artifactUrl = (0, utils_1.getArtifactUrl)();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = (0, utils_1.getDownloadHeaders)('application/json');
            const response = yield (0, requestUtils_1.retryHttpClientRequest)('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); }));
            const body = yield response.readBody();
            return JSON.parse(body);
        });
    }
    /**
     * Fetches a set of container items that describe the contents of an artifact
     * @param artifactName the name of the artifact
     * @param containerUrl the artifact container URL for the run
     */
    getContainerItems(artifactName, containerUrl) {
        return __awaiter(this, void 0, void 0, function* () {
            // the itemPath search parameter controls which containers will be returned
            const resourceUrl = new url_1.URL(containerUrl);
            resourceUrl.searchParams.append('itemPath', artifactName);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = (0, utils_1.getDownloadHeaders)('application/json');
            const response = yield (0, requestUtils_1.retryHttpClientRequest)('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); }));
            const body = yield response.readBody();
            return JSON.parse(body);
        });
    }
    /**
     * Concurrently downloads all the files that are part of an artifact
     * @param downloadItems information about what items to download and where to save them
     */
    downloadSingleArtifact(downloadItems) {
        return __awaiter(this, void 0, void 0, function* () {
            const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)();
            // limit the number of files downloaded at a single time
            core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`);
            const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()];
            let currentFile = 0;
            let downloadedFiles = 0;
            core.info(`Total number of files that will be downloaded: ${downloadItems.length}`);
            this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length);
            this.statusReporter.start();
            yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () {
                while (currentFile < downloadItems.length) {
                    const currentFileToDownload = downloadItems[currentFile];
                    currentFile += 1;
                    const startTime = perf_hooks_1.performance.now();
                    yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath);
                    if (core.isDebug()) {
                        core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`);
                    }
                    this.statusReporter.incrementProcessedCount();
                }
            })))
                .catch(error => {
                throw new Error(`Unable to download the artifact: ${error}`);
            })
                .finally(() => {
                this.statusReporter.stop();
                // safety dispose all connections
                this.downloadHttpManager.disposeAndReplaceAllClients();
            });
        });
    }
    /**
     * Downloads an individual file
     * @param httpClientIndex the index of the http client that is used to make all of the calls
     * @param artifactLocation origin location where a file will be downloaded from
     * @param downloadPath destination location for the file being downloaded
     */
    downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) {
        return __awaiter(this, void 0, void 0, function* () {
            let retryCount = 0;
            const retryLimit = (0, config_variables_1.getRetryLimit)();
            let destinationStream = fs.createWriteStream(downloadPath);
            const headers = (0, utils_1.getDownloadHeaders)('application/json', true, true);
            // a single GET request is used to download a file
            const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.downloadHttpManager.getClient(httpClientIndex);
                return yield client.get(artifactLocation, headers);
            });
            // check the response headers to determine if the file was compressed using gzip
            const isGzip = (incomingHeaders) => {
                return ('content-encoding' in incomingHeaders &&
                    incomingHeaders['content-encoding'] === 'gzip');
            };
            // Increments the current retry count and then checks if the retry limit has been reached
            // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
            // it will be used
            const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
                retryCount++;
                if (retryCount > retryLimit) {
                    return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`));
                }
                else {
                    this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex);
                    if (retryAfterValue) {
                        // Back off by waiting the specified time denoted by the retry-after header
                        core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`);
                        yield (0, utils_1.sleep)(retryAfterValue);
                    }
                    else {
                        // Back off using an exponential value that depends on the retry count
                        const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount);
                        core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`);
                        yield (0, utils_1.sleep)(backoffTime);
                    }
                    core.info(`Finished backoff for retry #${retryCount}, continuing with download`);
                }
            });
            const isAllBytesReceived = (expected, received) => {
                // be lenient, if any input is missing, assume success, i.e. not truncated
                if (!expected ||
                    !received ||
                    process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) {
                    core.info('Skipping download validation.');
                    return true;
                }
                return parseInt(expected) === received;
            };
            const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
                destinationStream.close();
                // await until file is created at downloadpath; node15 and up fs.createWriteStream had not created a file yet
                yield new Promise(resolve => {
                    destinationStream.on('close', resolve);
                    if (destinationStream.writableFinished) {
                        resolve();
                    }
                });
                yield (0, utils_1.rmFile)(fileDownloadPath);
                destinationStream = fs.createWriteStream(fileDownloadPath);
            });
            // keep trying to download a file until a retry limit has been reached
            while (retryCount <= retryLimit) {
                let response;
                try {
                    response = yield makeDownloadRequest();
                }
                catch (error) {
                    // if an error is caught, it is usually indicative of a timeout so retry the download
                    core.info('An error occurred while attempting to download a file');
                    // eslint-disable-next-line no-console
                    console.log(error);
                    // increment the retryCount and use exponential backoff to wait before making the next request
                    yield backOff();
                    continue;
                }
                let forceRetry = false;
                if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) {
                    // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
                    // which can cause some gzip encoded data to be lost
                    // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
                    try {
                        const isGzipped = isGzip(response.message.headers);
                        yield this.pipeResponseToFile(response, destinationStream, isGzipped);
                        if (isGzipped ||
                            isAllBytesReceived(response.message.headers['content-length'], yield (0, utils_1.getFileSize)(downloadPath))) {
                            return;
                        }
                        else {
                            forceRetry = true;
                        }
                    }
                    catch (error) {
                        // retry on error, most likely streams were corrupted
                        forceRetry = true;
                    }
                }
                if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) {
                    core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`);
                    resetDestinationStream(downloadPath);
                    // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff
                    (0, utils_1.isThrottledStatusCode)(response.message.statusCode)
                        ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers))
                        : yield backOff();
                }
                else {
                    // Some unexpected response code, fail immediately and stop the download
                    (0, utils_1.displayHttpDiagnostics)(response);
                    return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`));
                }
            }
        });
    }
    /**
     * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
     * @param response the http response received when downloading a file
     * @param destinationStream the stream where the file should be written to
     * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
     */
    pipeResponseToFile(response, destinationStream, isGzip) {
        return __awaiter(this, void 0, void 0, function* () {
            yield new Promise((resolve, reject) => {
                if (isGzip) {
                    const gunzip = zlib.createGunzip();
                    response.message
                        .on('error', error => {
                            core.info(`An error occurred while attempting to read the response stream`);
                            gunzip.close();
                            destinationStream.close();
                            reject(error);
                        })
                        .pipe(gunzip)
                        .on('error', error => {
                            core.info(`An error occurred while attempting to decompress the response stream`);
                            destinationStream.close();
                            reject(error);
                        })
                        .pipe(destinationStream)
                        .on('close', () => {
                            resolve();
                        })
                        .on('error', error => {
                            core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
                            reject(error);
                        });
                }
                else {
                    response.message
                        .on('error', error => {
                            core.info(`An error occurred while attempting to read the response stream`);
                            destinationStream.close();
                            reject(error);
                        })
                        .pipe(destinationStream)
                        .on('close', () => {
                            resolve();
                        })
                        .on('error', error => {
                            core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
                            reject(error);
                        });
                }
            });
            return;
        });
    }
}
exports.DownloadHttpClient = DownloadHttpClient;
//# sourceMappingURL=download-http-client.js.map
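Aside, not part of the diff: the gzip branch in pipeResponseToFile above is easy to lose in the transpiled callback chains. A simplified sketch of the same idea (not the library's code) using Node's stream.pipeline, which handles the error propagation and stream cleanup that the original wires up by hand:

import * as fs from 'fs'
import * as zlib from 'zlib'
import { pipeline } from 'stream/promises'
import type { IncomingMessage } from 'http'

// Write an HTTP response body to disk, gunzipping when the server
// indicates the payload was compressed in transit.
async function writeBody(res: IncomingMessage, dest: string): Promise<void> {
  const out = fs.createWriteStream(dest)
  if (res.headers['content-encoding'] === 'gzip') {
    await pipeline(res, zlib.createGunzip(), out)
  } else {
    await pipeline(res, out)
  }
}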
1
node_modules/@actions/artifact/lib/internal/download-http-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
7
node_modules/@actions/artifact/lib/internal/download-options.d.ts
generated
vendored
@@ -1,7 +0,0 @@
export interface DownloadOptions {
    /**
     * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder),
     * defaults to false if not specified
     * */
    createArtifactFolder?: boolean;
}
3
node_modules/@actions/artifact/lib/internal/download-options.js
generated
vendored
@@ -1,3 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=download-options.js.map
1
node_modules/@actions/artifact/lib/internal/download-options.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"download-options.js","sourceRoot":"","sources":["../../src/internal/download-options.ts"],"names":[],"mappings":""}
10
node_modules/@actions/artifact/lib/internal/download-response.d.ts
generated
vendored
@@ -1,10 +0,0 @@
export interface DownloadResponse {
    /**
     * The name of the artifact that was downloaded
     */
    artifactName: string;
    /**
     * The full Path to where the artifact was downloaded
     */
    downloadPath: string;
}
3
node_modules/@actions/artifact/lib/internal/download-response.js
generated
vendored
@@ -1,3 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=download-response.js.map
1
node_modules/@actions/artifact/lib/internal/download-response.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"download-response.js","sourceRoot":"","sources":["../../src/internal/download-response.ts"],"names":[],"mappings":""}
19
node_modules/@actions/artifact/lib/internal/download-specification.d.ts
generated
vendored
@@ -1,19 +0,0 @@
import { ContainerEntry } from './contracts';
export interface DownloadSpecification {
    rootDownloadLocation: string;
    directoryStructure: string[];
    emptyFilesToCreate: string[];
    filesToDownload: DownloadItem[];
}
export interface DownloadItem {
    sourceLocation: string;
    targetPath: string;
}
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe that files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
export declare function getDownloadSpecification(artifactName: string, artifactEntries: ContainerEntry[], downloadPath: string, includeRootDirectory: boolean): DownloadSpecification;
78
node_modules/@actions/artifact/lib/internal/download-specification.js
generated
vendored
@@ -1,78 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDownloadSpecification = void 0;
const path = __importStar(require("path"));
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe that files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
    // use a set for the directory paths so that there are no duplicates
    const directories = new Set();
    const specifications = {
        rootDownloadLocation: includeRootDirectory
            ? path.join(downloadPath, artifactName)
            : downloadPath,
        directoryStructure: [],
        emptyFilesToCreate: [],
        filesToDownload: []
    };
    for (const entry of artifactEntries) {
        // Ignore artifacts in the container that don't begin with the same name
        if (entry.path.startsWith(`${artifactName}/`) ||
            entry.path.startsWith(`${artifactName}\\`)) {
            // normalize all separators to the local OS
            const normalizedPathEntry = path.normalize(entry.path);
            // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path
            const filePath = path.join(downloadPath, includeRootDirectory
                ? normalizedPathEntry
                : normalizedPathEntry.replace(artifactName, ''));
            // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder'
            // itemType cannot be relied upon. The file must be used to determine the directory structure
            if (entry.itemType === 'file') {
                // Get the directories that we need to create from the filePath for each individual file
                directories.add(path.dirname(filePath));
                if (entry.fileLength === 0) {
                    // An empty file was uploaded, create the empty files locally so that no extra http calls are made
                    specifications.emptyFilesToCreate.push(filePath);
                }
                else {
                    specifications.filesToDownload.push({
                        sourceLocation: entry.contentLocation,
                        targetPath: filePath
                    });
                }
            }
        }
    }
    specifications.directoryStructure = Array.from(directories);
    return specifications;
}
exports.getDownloadSpecification = getDownloadSpecification;
//# sourceMappingURL=download-specification.js.map
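Aside, not part of the diff: a small sketch of what getDownloadSpecification produces for a couple of container entries (the entry values are made up for illustration, and the cast skips the fields of ContainerEntry that the function does not read):

import { getDownloadSpecification } from './download-specification'

const spec = getDownloadSpecification(
  'my-artifact',
  [
    // hypothetical entries, shaped like getContainerItems() results
    { path: 'my-artifact/report.txt', itemType: 'file', fileLength: 12,
      contentLocation: 'https://example.com/report.txt' },
    { path: 'my-artifact/empty.log', itemType: 'file', fileLength: 0,
      contentLocation: 'https://example.com/empty.log' }
  ] as any,
  '/tmp/out',
  false // no extra root folder named after the artifact
)
// spec.filesToDownload   -> [{ sourceLocation: '...', targetPath: '/tmp/out/report.txt' }]
// spec.emptyFilesToCreate -> ['/tmp/out/empty.log']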
1
node_modules/@actions/artifact/lib/internal/download-specification.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"download-specification.js","sourceRoot":"","sources":["../../src/internal/download-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAyB5B;;;;;;GAMG;AACH,SAAgB,wBAAwB,CACtC,YAAoB,EACpB,eAAiC,EACjC,YAAoB,EACpB,oBAA6B;IAE7B,oEAAoE;IACpE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAU,CAAA;IAErC,MAAM,cAAc,GAA0B;QAC5C,oBAAoB,EAAE,oBAAoB;YACxC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,CAAC;YACvC,CAAC,CAAC,YAAY;QAChB,kBAAkB,EAAE,EAAE;QACtB,kBAAkB,EAAE,EAAE;QACtB,eAAe,EAAE,EAAE;KACpB,CAAA;IAED,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE;QACnC,wEAAwE;QACxE,IACE,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,GAAG,CAAC;YACzC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,IAAI,CAAC,EAC1C;YACA,2CAA2C;YAC3C,MAAM,mBAAmB,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACtD,oIAAoI;YACpI,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CACxB,YAAY,EACZ,oBAAoB;gBAClB,CAAC,CAAC,mBAAmB;gBACrB,CAAC,CAAC,mBAAmB,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAClD,CAAA;YAED,2GAA2G;YAC3G,6FAA6F;YAC7F,IAAI,KAAK,CAAC,QAAQ,KAAK,MAAM,EAAE;gBAC7B,wFAAwF;gBACxF,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAA;gBACvC,IAAI,KAAK,CAAC,UAAU,KAAK,CAAC,EAAE;oBAC1B,kGAAkG;oBAClG,cAAc,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACjD;qBAAM;oBACL,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC;wBAClC,cAAc,EAAE,KAAK,CAAC,eAAe;wBACrC,UAAU,EAAE,QAAQ;qBACrB,CAAC,CAAA;iBACH;aACF;SACF;KACF;IAED,cAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAC3D,OAAO,cAAc,CAAA;AACvB,CAAC;AAtDD,4DAsDC"}
4
node_modules/@actions/artifact/lib/internal/download/download-artifact.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
import { DownloadArtifactOptions, DownloadArtifactResponse } from '../shared/interfaces';
export declare function streamExtractExternal(url: string, directory: string): Promise<void>;
export declare function downloadArtifactPublic(artifactId: number, repositoryOwner: string, repositoryName: string, token: string, options?: DownloadArtifactOptions): Promise<DownloadArtifactResponse>;
export declare function downloadArtifactInternal(artifactId: number, options?: DownloadArtifactOptions): Promise<DownloadArtifactResponse>;
205
node_modules/@actions/artifact/lib/internal/download/download-artifact.js
generated
vendored
Normal file
@@ -0,0 +1,205 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(require("fs/promises"));
const github = __importStar(require("@actions/github"));
const core = __importStar(require("@actions/core"));
const httpClient = __importStar(require("@actions/http-client"));
const unzip_stream_1 = __importDefault(require("unzip-stream"));
const user_agent_1 = require("../shared/user-agent");
const config_1 = require("../shared/config");
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
const generated_1 = require("../../generated");
const util_1 = require("../shared/util");
const errors_1 = require("../shared/errors");
const scrubQueryParameters = (url) => {
    const parsed = new URL(url);
    parsed.search = '';
    return parsed.toString();
};
function exists(path) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield promises_1.default.access(path);
            return true;
        }
        catch (error) {
            if (error.code === 'ENOENT') {
                return false;
            }
            else {
                throw error;
            }
        }
    });
}
function streamExtract(url, directory) {
    return __awaiter(this, void 0, void 0, function* () {
        let retryCount = 0;
        while (retryCount < 5) {
            try {
                yield streamExtractExternal(url, directory);
                return;
            }
            catch (error) {
                retryCount++;
                core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
                // wait 5 seconds before retrying
                yield new Promise(resolve => setTimeout(resolve, 5000));
            }
        }
        throw new Error(`Artifact download failed after ${retryCount} retries.`);
    });
}
function streamExtractExternal(url, directory) {
    return __awaiter(this, void 0, void 0, function* () {
        const client = new httpClient.HttpClient((0, user_agent_1.getUserAgentString)());
        const response = yield client.get(url);
        if (response.message.statusCode !== 200) {
            throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
        }
        const timeout = 30 * 1000; // 30 seconds
        return new Promise((resolve, reject) => {
            const timerFn = () => {
                response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
            };
            const timer = setTimeout(timerFn, timeout);
            response.message
                .on('data', () => {
                    timer.refresh();
                })
                .on('error', (error) => {
                    core.debug(`response.message: Artifact download failed: ${error.message}`);
                    clearTimeout(timer);
                    reject(error);
                })
                .pipe(unzip_stream_1.default.Extract({ path: directory }))
                .on('close', () => {
                    clearTimeout(timer);
                    resolve();
                })
                .on('error', (error) => {
                    reject(error);
                });
        });
    });
}
exports.streamExtractExternal = streamExtractExternal;
function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
        const api = github.getOctokit(token);
        core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
        const { headers, status } = yield api.rest.actions.downloadArtifact({
            owner: repositoryOwner,
            repo: repositoryName,
            artifact_id: artifactId,
            archive_format: 'zip',
            request: {
                redirect: 'manual'
            }
        });
        if (status !== 302) {
            throw new Error(`Unable to download artifact. Unexpected status: ${status}`);
        }
        const { location } = headers;
        if (!location) {
            throw new Error(`Unable to redirect to artifact download url`);
        }
        core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
        try {
            core.info(`Starting download of artifact to: ${downloadPath}`);
            yield streamExtract(location, downloadPath);
            core.info(`Artifact download completed successfully.`);
        }
        catch (error) {
            throw new Error(`Unable to download and extract artifact: ${error.message}`);
        }
        return { downloadPath };
    });
}
exports.downloadArtifactPublic = downloadArtifactPublic;
function downloadArtifactInternal(artifactId, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
        const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
        const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
        const listReq = {
            workflowRunBackendId,
            workflowJobRunBackendId,
            idFilter: generated_1.Int64Value.create({ value: artifactId.toString() })
        };
        const { artifacts } = yield artifactClient.ListArtifacts(listReq);
        if (artifacts.length === 0) {
            throw new errors_1.ArtifactNotFoundError(`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`);
        }
        if (artifacts.length > 1) {
            core.warning('Multiple artifacts found, defaulting to first.');
        }
        const signedReq = {
            workflowRunBackendId: artifacts[0].workflowRunBackendId,
            workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
            name: artifacts[0].name
        };
        const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq);
        core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
        try {
            core.info(`Starting download of artifact to: ${downloadPath}`);
            yield streamExtract(signedUrl, downloadPath);
            core.info(`Artifact download completed successfully.`);
        }
        catch (error) {
            throw new Error(`Unable to download and extract artifact: ${error.message}`);
        }
        return { downloadPath };
    });
}
exports.downloadArtifactInternal = downloadArtifactInternal;
function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!(yield exists(downloadPath))) {
            core.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`);
            yield promises_1.default.mkdir(downloadPath, { recursive: true });
        }
        else {
            core.debug(`Artifact destination folder already exists: ${downloadPath}`);
        }
        return downloadPath;
    });
}
//# sourceMappingURL=download-artifact.js.map
1
node_modules/@actions/artifact/lib/internal/download/download-artifact.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
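For orientation, a minimal TypeScript sketch of how the download entry point declared in download-artifact.d.ts above might be driven. The artifact ID, owner/repo, and output path are placeholder values; the import path is the vendored file added by this commit:

// Hypothetical usage sketch for downloadArtifactPublic as declared above.
import {downloadArtifactPublic} from '@actions/artifact/lib/internal/download/download-artifact'

async function demo(): Promise<void> {
  // Resolves the artifact's signed blob URL via the REST API (expects a 302),
  // then streams and unzips it into the target directory with retries.
  const {downloadPath} = await downloadArtifactPublic(
    12345,                       // placeholder artifact ID
    'some-owner',                // placeholder repository owner
    'some-repo',                 // placeholder repository name
    process.env.GITHUB_TOKEN ?? '',
    {path: '/tmp/artifact-out'}  // optional target directory
  )
  console.log(`extracted to ${downloadPath}`)
}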
3
node_modules/@actions/artifact/lib/internal/find/get-artifact.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
import { GetArtifactResponse } from '../shared/interfaces';
export declare function getArtifactPublic(artifactName: string, workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string): Promise<GetArtifactResponse>;
export declare function getArtifactInternal(artifactName: string): Promise<GetArtifactResponse>;
122
node_modules/@actions/artifact/lib/internal/find/get-artifact.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getArtifactInternal = exports.getArtifactPublic = void 0;
const github_1 = require("@actions/github");
const plugin_retry_1 = require("@octokit/plugin-retry");
const core = __importStar(require("@actions/core"));
const utils_1 = require("@actions/github/lib/utils");
const retry_options_1 = require("./retry-options");
const plugin_request_log_1 = require("@octokit/plugin-request-log");
const util_1 = require("../shared/util");
const user_agent_1 = require("../shared/user-agent");
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
const generated_1 = require("../../generated");
const errors_1 = require("../shared/errors");
function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
        const opts = {
            log: undefined,
            userAgent: (0, user_agent_1.getUserAgentString)(),
            previews: undefined,
            retry: retryOpts,
            request: requestOpts
        };
        const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
        const getArtifactResp = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}', {
            owner: repositoryOwner,
            repo: repositoryName,
            run_id: workflowRunId,
            name: artifactName
        });
        if (getArtifactResp.status !== 200) {
            throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${getArtifactResp.status} (${(_a = getArtifactResp === null || getArtifactResp === void 0 ? void 0 : getArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
        }
        if (getArtifactResp.data.artifacts.length === 0) {
            throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
        }
        let artifact = getArtifactResp.data.artifacts[0];
        if (getArtifactResp.data.artifacts.length > 1) {
            artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0];
            core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.id})`);
        }
        return {
            artifact: {
                name: artifact.name,
                id: artifact.id,
                size: artifact.size_in_bytes,
                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
            }
        };
    });
}
exports.getArtifactPublic = getArtifactPublic;
function getArtifactInternal(artifactName) {
    return __awaiter(this, void 0, void 0, function* () {
        const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
        const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
        const req = {
            workflowRunBackendId,
            workflowJobRunBackendId,
            nameFilter: generated_1.StringValue.create({ value: artifactName })
        };
        const res = yield artifactClient.ListArtifacts(req);
        if (res.artifacts.length === 0) {
            throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
        }
        let artifact = res.artifacts[0];
        if (res.artifacts.length > 1) {
            artifact = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
            core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
        }
        return {
            artifact: {
                name: artifact.name,
                id: Number(artifact.databaseId),
                size: Number(artifact.size),
                createdAt: artifact.createdAt
                    ? generated_1.Timestamp.toDate(artifact.createdAt)
                    : undefined
            }
        };
    });
}
exports.getArtifactInternal = getArtifactInternal;
//# sourceMappingURL=get-artifact.js.map
1
node_modules/@actions/artifact/lib/internal/find/get-artifact.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"get-artifact.js","sourceRoot":"","sources":["../../../src/internal/find/get-artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,4CAA0C;AAC1C,wDAA2C;AAC3C,oDAAqC;AAErC,qDAA0E;AAC1E,mDAA+C;AAC/C,oEAAsD;AAEtD,yCAAqD;AACrD,qDAAuD;AACvD,2EAA2E;AAC3E,+CAA4E;AAC5E,6CAA4E;AAE5E,SAAsB,iBAAiB,CACrC,YAAoB,EACpB,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa;;;QAEb,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,MAAM,eAAe,GAAG,MAAM,MAAM,CAAC,OAAO,CAC1C,kEAAkE,EAClE;YACE,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,MAAM,EAAE,aAAa;YACrB,IAAI,EAAE,YAAY;SACnB,CACF,CAAA;QAED,IAAI,eAAe,CAAC,MAAM,KAAK,GAAG,EAAE;YAClC,MAAM,IAAI,6BAAoB,CAC5B,qCAAqC,eAAe,CAAC,MAAM,KAAK,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,OAAO,0CAAG,qBAAqB,CAAC,GAAG,CACrH,CAAA;SACF;QAED,IAAI,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/C,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY;;yIAEuF,CACpI,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QAChD,IAAI,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAC7C,QAAQ,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACxE,IAAI,CAAC,KAAK,CACR,yEAAyE,QAAQ,CAAC,EAAE,GAAG,CACxF,CAAA;SACF;QAED,OAAO;YACL,QAAQ,EAAE;gBACR,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;gBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;gBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS;aAC3E;SACF,CAAA;;CACF;AA3DD,8CA2DC;AAED,SAAsB,mBAAmB,CACvC,YAAoB;;QAEpB,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,GAAG,GAAyB;YAChC,oBAAoB;YACpB,uBAAuB;YACvB,UAAU,EAAE,uBAAW,CAAC,MAAM,CAAC,EAAC,KAAK,EAAE,YAAY,EAAC,CAAC;SACtD,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;QAEnD,IAAI,GAAG,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC9B,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY;;yIAEuF,CACpI,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,GAAG,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,IAAI,CAC3B,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CACtD,CAAC,CAAC,CAAC,CAAA;YAEJ,IAAI,CAAC,KAAK,CACR,yEAAyE,QAAQ,CAAC,UAAU,GAAG,CAChG,CAAA;SACF;QAED,OAAO;YACL,QAAQ,EAAE;gBACR,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC;gBAC/B,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;gBAC3B,SAAS,EAAE,QAAQ,CAAC,SAAS;oBAC3B,CAAC,CAAC,qBAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACtC,CAAC,CAAC,SAAS;aACd;SACF,CAAA;IACH,CAAC;CAAA;AA7CD,kDA6CC"}
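Both lookup paths in get-artifact.js above resolve name collisions the same way: sort by numeric ID descending and take the head, so the newest upload wins. A tiny self-contained sketch of that selection rule (the interface and sample data are illustrative):

// "Newest wins" selection as used when several artifacts share one name.
interface ArtifactLike { name: string; id: number }

function pickNewest(artifacts: ArtifactLike[]): ArtifactLike {
  // Copy before sorting so the caller's array order is preserved.
  return [...artifacts].sort((a, b) => b.id - a.id)[0]
}

console.log(pickNewest([{name: 'report', id: 7}, {name: 'report', id: 12}]).id) // 12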
3
node_modules/@actions/artifact/lib/internal/find/list-artifacts.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
import { ListArtifactsResponse } from '../shared/interfaces';
export declare function listArtifactsPublic(workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string, latest?: boolean): Promise<ListArtifactsResponse>;
export declare function listArtifactsInternal(latest?: boolean): Promise<ListArtifactsResponse>;
139
node_modules/@actions/artifact/lib/internal/find/list-artifacts.js
generated
vendored
Normal file
@@ -0,0 +1,139 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.listArtifactsInternal = exports.listArtifactsPublic = void 0;
const core_1 = require("@actions/core");
const github_1 = require("@actions/github");
const user_agent_1 = require("../shared/user-agent");
const retry_options_1 = require("./retry-options");
const utils_1 = require("@actions/github/lib/utils");
const plugin_request_log_1 = require("@octokit/plugin-request-log");
const plugin_retry_1 = require("@octokit/plugin-retry");
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
const util_1 = require("../shared/util");
const generated_1 = require("../../generated");
// Limiting to 1000 for perf reasons
const maximumArtifactCount = 1000;
const paginationCount = 100;
const maxNumberOfPages = maximumArtifactCount / paginationCount;
function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, token, latest = false) {
    return __awaiter(this, void 0, void 0, function* () {
        (0, core_1.info)(`Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`);
        let artifacts = [];
        const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
        const opts = {
            log: undefined,
            userAgent: (0, user_agent_1.getUserAgentString)(),
            previews: undefined,
            retry: retryOpts,
            request: requestOpts
        };
        const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
        let currentPageNumber = 1;
        const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
            owner: repositoryOwner,
            repo: repositoryName,
            run_id: workflowRunId,
            per_page: paginationCount,
            page: currentPageNumber
        });
        let numberOfPages = Math.ceil(listArtifactResponse.total_count / paginationCount);
        const totalArtifactCount = listArtifactResponse.total_count;
        if (totalArtifactCount > maximumArtifactCount) {
            (0, core_1.warning)(`Workflow run ${workflowRunId} has more than 1000 artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`);
            numberOfPages = maxNumberOfPages;
        }
        // Iterate over the first page
        for (const artifact of listArtifactResponse.artifacts) {
            artifacts.push({
                name: artifact.name,
                id: artifact.id,
                size: artifact.size_in_bytes,
                createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
            });
        }
        // Iterate over any remaining pages
        for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
            currentPageNumber++;
            (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
            const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
                owner: repositoryOwner,
                repo: repositoryName,
                run_id: workflowRunId,
                per_page: paginationCount,
                page: currentPageNumber
            });
            for (const artifact of listArtifactResponse.artifacts) {
                artifacts.push({
                    name: artifact.name,
                    id: artifact.id,
                    size: artifact.size_in_bytes,
                    createdAt: artifact.created_at
                        ? new Date(artifact.created_at)
                        : undefined
                });
            }
        }
        if (latest) {
            artifacts = filterLatest(artifacts);
        }
        (0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
        return {
            artifacts
        };
    });
}
exports.listArtifactsPublic = listArtifactsPublic;
function listArtifactsInternal(latest = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
        const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
        const req = {
            workflowRunBackendId,
            workflowJobRunBackendId
        };
        const res = yield artifactClient.ListArtifacts(req);
        let artifacts = res.artifacts.map(artifact => ({
            name: artifact.name,
            id: Number(artifact.databaseId),
            size: Number(artifact.size),
            createdAt: artifact.createdAt
                ? generated_1.Timestamp.toDate(artifact.createdAt)
                : undefined
        }));
        if (latest) {
            artifacts = filterLatest(artifacts);
        }
        (0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
        return {
            artifacts
        };
    });
}
exports.listArtifactsInternal = listArtifactsInternal;
/**
 * Filters a list of artifacts to only include the latest artifact for each name
 * @param artifacts The artifacts to filter
 * @returns The filtered list of artifacts
 */
function filterLatest(artifacts) {
    artifacts.sort((a, b) => b.id - a.id);
    const latestArtifacts = [];
    const seenArtifactNames = new Set();
    for (const artifact of artifacts) {
        if (!seenArtifactNames.has(artifact.name)) {
            latestArtifacts.push(artifact);
            seenArtifactNames.add(artifact.name);
        }
    }
    return latestArtifacts;
}
//# sourceMappingURL=list-artifacts.js.map
1
node_modules/@actions/artifact/lib/internal/find/list-artifacts.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"list-artifacts.js","sourceRoot":"","sources":["../../../src/internal/find/list-artifacts.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAkD;AAClD,4CAA0C;AAE1C,qDAAuD;AACvD,mDAA+C;AAC/C,qDAA0E;AAC1E,oEAAsD;AACtD,wDAA2C;AAE3C,2EAA2E;AAC3E,yCAAqD;AACrD,+CAA+D;AAE/D,oCAAoC;AACpC,MAAM,oBAAoB,GAAG,IAAI,CAAA;AACjC,MAAM,eAAe,GAAG,GAAG,CAAA;AAC3B,MAAM,gBAAgB,GAAG,oBAAoB,GAAG,eAAe,CAAA;AAE/D,SAAsB,mBAAmB,CACvC,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa,EACb,MAAM,GAAG,KAAK;;QAEd,IAAA,WAAI,EACF,2CAA2C,aAAa,kBAAkB,eAAe,IAAI,cAAc,EAAE,CAC9G,CAAA;QAED,IAAI,SAAS,GAAe,EAAE,CAAA;QAC9B,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,IAAI,iBAAiB,GAAG,CAAC,CAAA;QACzB,MAAM,EAAC,IAAI,EAAE,oBAAoB,EAAC,GAChC,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,wBAAwB,CAAC;YACjD,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,MAAM,EAAE,aAAa;YACrB,QAAQ,EAAE,eAAe;YACzB,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;QAEJ,IAAI,aAAa,GAAG,IAAI,CAAC,IAAI,CAC3B,oBAAoB,CAAC,WAAW,GAAG,eAAe,CACnD,CAAA;QACD,MAAM,kBAAkB,GAAG,oBAAoB,CAAC,WAAW,CAAA;QAC3D,IAAI,kBAAkB,GAAG,oBAAoB,EAAE;YAC7C,IAAA,cAAO,EACL,gBAAgB,aAAa,+EAA+E,oBAAoB,6BAA6B,CAC9J,CAAA;YACD,aAAa,GAAG,gBAAgB,CAAA;SACjC;QAED,8BAA8B;QAC9B,KAAK,MAAM,QAAQ,IAAI,oBAAoB,CAAC,SAAS,EAAE;YACrD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;gBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;gBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS;aAC3E,CAAC,CAAA;SACH;QAED,mCAAmC;QACnC,KACE,iBAAiB,EACjB,iBAAiB,GAAG,aAAa,EACjC,iBAAiB,EAAE,EACnB;YACA,iBAAiB,EAAE,CAAA;YACnB,IAAA,YAAK,EAAC,iBAAiB,iBAAiB,mBAAmB,CAAC,CAAA;YAE5D,MAAM,EAAC,IAAI,EAAE,oBAAoB,EAAC,GAChC,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,wBAAwB,CAAC;gBACjD,KAAK,EAAE,eAAe;gBACtB,IAAI,EAAE,cAAc;gBACpB,MAAM,EAAE,aAAa;gBACrB,QAAQ,EAAE,eAAe;gBACzB,IAAI,EAAE,iBAAiB;aACxB,CAAC,CAAA;YAEJ,KAAK,MAAM,QAAQ,IAAI,oBAAoB,CAAC,SAAS,EAAE;gBACrD,SAAS,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,QAAQ,CAAC,IAAI;oBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;oBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;oBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU;wBAC5B,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC;wBAC/B,CAAC,CAAC,SAAS;iBACd,CAAC,CAAA;aACH;SACF;QAED,IAAI,MAAM,EAAE;YACV,SAAS,GAAG,YAAY,CAAC,SAAS,CAAC,CAAA;SACpC;QAED,IAAA,WAAI,EAAC,SAAS,SAAS,CAAC,MAAM,cAAc,CAAC,CAAA;QAE7C,OAAO;YACL,SAAS;SACV,CAAA;IACH,CAAC;CAAA;AA9FD,kDA8FC;AAED,SAAsB,qBAAqB,CACzC,MAAM,GAAG,KAAK;;QAEd,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,GAAG,GAAyB;YAChC,oBAAoB;YACpB,uBAAuB;SACxB,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;QACnD,IAAI,SAAS,GAAe,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YACzD,IAAI,EAAE,QAAQ,CAAC,IAAI;YACnB,EAAE,EAAE,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC;YAC/B,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC3B,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC3B,CAAC,CAAC,qBAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC;gBACtC,CAAC,CAAC,SAAS;SACd,CAAC,CAAC,CAAA;QAEH,IAAI,MAAM,EAAE;YACV,SAAS,GAAG,YAAY,CAAC,SAAS,CAAC,CAAA;SACpC;QAED,IAAA,WAAI,EAAC,SAAS,SAAS,CAAC,MAAM,cAAc,CAAC,CAAA;QAE7C,OAAO;YACL,SAAS;SACV,CAAA;IACH,CAAC;CAAA;AAhCD,sDAgCC;AAED;;;;GAIG;AACH,SAAS,YAAY,CAAC,SAAqB;IACzC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,eAAe,GAAe,EAAE,CAAA;IACtC,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAU,CAAA;IAC3C,K
AAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACzC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;YAC9B,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;SACrC;KACF;IACD,OAAO,eAAe,CAAA;AACxB,CAAC"}
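The pagination bounds in listArtifactsPublic above reduce to a small calculation: pages needed is ceil(total_count / per_page), hard-capped at maximumArtifactCount / paginationCount pages. A quick check using the same constants from the diff:

// Pagination-bound check for listArtifactsPublic; constants match the diff.
const maximumArtifactCount = 1000
const paginationCount = 100
const maxNumberOfPages = maximumArtifactCount / paginationCount // 10

function pagesToFetch(totalCount: number): number {
  const pages = Math.ceil(totalCount / paginationCount)
  return totalCount > maximumArtifactCount ? maxNumberOfPages : pages
}

console.log(pagesToFetch(250))  // 3
console.log(pagesToFetch(5000)) // 10 (capped; results are incomplete)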
7
node_modules/@actions/artifact/lib/internal/find/retry-options.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import { OctokitOptions } from '@octokit/core/dist-types/types';
import { RequestRequestOptions } from '@octokit/types';
export type RetryOptions = {
    doNotRetry?: number[];
    enabled?: boolean;
};
export declare function getRetryOptions(defaultOptions: OctokitOptions, retries?: number, exemptStatusCodes?: number[]): [RetryOptions, RequestRequestOptions | undefined];
50
node_modules/@actions/artifact/lib/internal/find/retry-options.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRetryOptions = void 0;
const core = __importStar(require("@actions/core"));
// Defaults for fetching artifacts
const defaultMaxRetryNumber = 5;
const defaultExemptStatusCodes = [400, 401, 403, 404, 422]; // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14
function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) {
    var _a;
    if (retries <= 0) {
        return [{ enabled: false }, defaultOptions.request];
    }
    const retryOptions = {
        enabled: true
    };
    if (exemptStatusCodes.length > 0) {
        retryOptions.doNotRetry = exemptStatusCodes;
    }
    // The GitHub type has some defaults for `options.request`
    // see: https://github.com/actions/toolkit/blob/4fbc5c941a57249b19562015edbd72add14be93d/packages/github/src/utils.ts#L15
    // We pass these in here so they are not overridden.
    const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries });
    core.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : 'octokit default: [400, 401, 403, 404, 422]'})`);
    return [retryOptions, requestOptions];
}
exports.getRetryOptions = getRetryOptions;
//# sourceMappingURL=retry-options.js.map
1
node_modules/@actions/artifact/lib/internal/find/retry-options.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"retry-options.js","sourceRoot":"","sources":["../../../src/internal/find/retry-options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AASrC,kCAAkC;AAClC,MAAM,qBAAqB,GAAG,CAAC,CAAA;AAC/B,MAAM,wBAAwB,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAA,CAAC,4GAA4G;AAEvK,SAAgB,eAAe,CAC7B,cAA8B,EAC9B,UAAkB,qBAAqB,EACvC,oBAA8B,wBAAwB;;IAEtD,IAAI,OAAO,IAAI,CAAC,EAAE;QAChB,OAAO,CAAC,EAAC,OAAO,EAAE,KAAK,EAAC,EAAE,cAAc,CAAC,OAAO,CAAC,CAAA;KAClD;IAED,MAAM,YAAY,GAAiB;QACjC,OAAO,EAAE,IAAI;KACd,CAAA;IAED,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC,EAAE;QAChC,YAAY,CAAC,UAAU,GAAG,iBAAiB,CAAA;KAC5C;IAED,0DAA0D;IAC1D,yHAAyH;IACzH,oDAAoD;IACpD,MAAM,cAAc,mCACf,cAAc,CAAC,OAAO,KACzB,OAAO,GACR,CAAA;IAED,IAAI,CAAC,KAAK,CACR,4CACE,cAAc,CAAC,OACjB,+BACE,MAAA,YAAY,CAAC,UAAU,mCAAI,4CAC7B,GAAG,CACJ,CAAA;IAED,OAAO,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;AACvC,CAAC;AAlCD,0CAkCC"}
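A sketch of how getRetryOptions plugs into an Octokit instance, mirroring the wiring used by get-artifact.js and list-artifacts.js earlier in this diff. The deep import path is the vendored file added above; the diff's code also passes log, previews, and a userAgent, omitted here for brevity:

// Hypothetical consumer of getRetryOptions, following the pattern in the diff.
import {getOctokit} from '@actions/github'
import {defaults} from '@actions/github/lib/utils'
import {retry} from '@octokit/plugin-retry'
import {requestLog} from '@octokit/plugin-request-log'
import {getRetryOptions} from '@actions/artifact/lib/internal/find/retry-options'

const [retryOpts, requestOpts] = getRetryOptions(defaults)
const octokit = getOctokit(process.env.GITHUB_TOKEN ?? '', {
  retry: retryOpts,     // consumed by @octokit/plugin-retry (enabled + doNotRetry)
  request: requestOpts  // preserves the GitHub defaults and adds `retries`
}, retry, requestLog)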
12
node_modules/@actions/artifact/lib/internal/http-manager.d.ts
generated
vendored
@@ -1,12 +0,0 @@
import { HttpClient } from '@actions/http-client';
/**
 * Used for managing http clients during either upload or download
 */
export declare class HttpManager {
    private clients;
    private userAgent;
    constructor(clientCount: number, userAgent: string);
    getClient(index: number): HttpClient;
    disposeAndReplaceClient(index: number): void;
    disposeAndReplaceAllClients(): void;
}
32
node_modules/@actions/artifact/lib/internal/http-manager.js
generated
vendored
@@ -1,32 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpManager = void 0;
const utils_1 = require("./utils");
/**
 * Used for managing http clients during either upload or download
 */
class HttpManager {
    constructor(clientCount, userAgent) {
        if (clientCount < 1) {
            throw new Error('There must be at least one client');
        }
        this.userAgent = userAgent;
        this.clients = new Array(clientCount).fill((0, utils_1.createHttpClient)(userAgent));
    }
    getClient(index) {
        return this.clients[index];
    }
    // client disposal is necessary if a keep-alive connection is used to properly close the connection
    // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
    disposeAndReplaceClient(index) {
        this.clients[index].dispose();
        this.clients[index] = (0, utils_1.createHttpClient)(this.userAgent);
    }
    disposeAndReplaceAllClients() {
        for (const [index] of this.clients.entries()) {
            this.disposeAndReplaceClient(index);
        }
    }
}
exports.HttpManager = HttpManager;
//# sourceMappingURL=http-manager.js.map
1
node_modules/@actions/artifact/lib/internal/http-manager.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"http-manager.js","sourceRoot":"","sources":["../../src/internal/http-manager.ts"],"names":[],"mappings":";;;AACA,mCAAwC;AAExC;;GAEG;AACH,MAAa,WAAW;IAItB,YAAY,WAAmB,EAAE,SAAiB;QAChD,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;SACrD;QACD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,IAAA,wBAAgB,EAAC,SAAS,CAAC,CAAC,CAAA;IACzE,CAAC;IAED,SAAS,CAAC,KAAa;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;IAC5B,CAAC;IAED,mGAAmG;IACnG,+HAA+H;IAC/H,uBAAuB,CAAC,KAAa;QACnC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAA;QAC7B,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,IAAA,wBAAgB,EAAC,IAAI,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;IAED,2BAA2B;QACzB,KAAK,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC5C,IAAI,CAAC,uBAAuB,CAAC,KAAK,CAAC,CAAA;SACpC;IACH,CAAC;CACF;AA5BD,kCA4BC"}
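One subtlety worth noting in the removed HttpManager constructor above: Array(n).fill(x) stores the same object reference in every slot, so all slots initially share a single HttpClient instance until disposeAndReplaceClient swaps in a fresh one per index. A self-contained demonstration of that JavaScript semantics:

// fill() copies a reference, not the factory call, into each slot.
const shared = {id: Math.random()}
const clients: object[] = new Array(3).fill(shared)
console.log(clients[0] === clients[2]) // true: one instance, three slots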
8
node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.d.ts
generated
vendored
@@ -1,8 +0,0 @@
/**
 * Scans the name of the artifact to make sure there are no illegal characters
 */
export declare function checkArtifactName(name: string): void;
/**
 * Scans the name of the filePath used to make sure there are no illegal characters
 */
export declare function checkArtifactFilePath(path: string): void;
1
node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"path-and-artifact-name-validation.js","sourceRoot":"","sources":["../../src/internal/path-and-artifact-name-validation.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAiB;IAChE,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,UAAU,CAAC;IACjB,CAAC,GAAG,EAAE,cAAc,CAAC;IACrB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,aAAa,CAAC;IACpB,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,CAAC,IAAI,EAAE,sBAAsB,CAAC;IAC9B,CAAC,IAAI,EAAE,gBAAgB,CAAC;CACzB,CAAC,CAAA;AAEF,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAiB;IAC5D,GAAG,iCAAiC;IACpC,CAAC,IAAI,EAAE,eAAe,CAAC;IACvB,CAAC,GAAG,EAAE,kBAAkB,CAAC;CAC1B,CAAC,CAAA;AAEF;;GAEG;AACH,SAAgB,iBAAiB,CAAC,IAAY;IAC5C,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,6BAA6B,EAAE;QAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,6BAA6B,CAAC,MAAM,EAAE,CACvC,CAAC,QAAQ,EAAE;;mRAE+P,CAC5Q,CAAA;SACF;KACF;IAED,IAAA,WAAI,EAAC,yBAAyB,CAAC,CAAA;AACjC,CAAC;AAvBD,8CAuBC;AAED;;GAEG;AACH,SAAgB,qBAAqB,CAAC,IAAY;IAChD,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,iCAAiC,EAAE;QACtC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,iCAAiC,CAAC,MAAM,EAAE,CAC3C,CAAC,QAAQ,EAAE;;;WAGT,CACJ,CAAA;SACF;KACF;AACH,CAAC;AAtBD,sDAsBC"}
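A rough sketch of what the removed validators enforced: rejecting names that contain characters which are invalid on some filesystems. The exact character set below is an assumption inferred from the removed source map (file paths disallow a core set; artifact names additionally disallow path separators), not a verbatim copy of the deleted code:

// Assumed character set; the real lists lived in the removed validation module.
const invalidArtifactNameCharacters = ['"', ':', '<', '>', '|', '*', '?', '\r', '\n', '\\', '/']

function checkArtifactNameSketch(name: string): void {
  for (const ch of invalidArtifactNameCharacters) {
    if (name.includes(ch)) {
      throw new Error(`Artifact name is not valid: contains ${JSON.stringify(ch)}`)
    }
  }
}

checkArtifactNameSketch('my-artifact') // ok; 'a/b' or 'a:b' would throw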
3
node_modules/@actions/artifact/lib/internal/requestUtils.d.ts
generated
vendored
@@ -1,3 +0,0 @@
import { HttpClientResponse } from '@actions/http-client';
export declare function retry(name: string, operation: () => Promise<HttpClientResponse>, customErrorMessages: Map<number, string>, maxAttempts: number): Promise<HttpClientResponse>;
export declare function retryHttpClientRequest(name: string, method: () => Promise<HttpClientResponse>, customErrorMessages?: Map<number, string>, maxAttempts?: number): Promise<HttpClientResponse>;
92
node_modules/@actions/artifact/lib/internal/requestUtils.js
generated
vendored
@@ -1,92 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.retryHttpClientRequest = exports.retry = void 0;
const utils_1 = require("./utils");
const core = __importStar(require("@actions/core"));
const config_variables_1 = require("./config-variables");
function retry(name, operation, customErrorMessages, maxAttempts) {
    return __awaiter(this, void 0, void 0, function* () {
        let response = undefined;
        let statusCode = undefined;
        let isRetryable = false;
        let errorMessage = '';
        let customErrorInformation = undefined;
        let attempt = 1;
        while (attempt <= maxAttempts) {
            try {
                response = yield operation();
                statusCode = response.message.statusCode;
                if ((0, utils_1.isSuccessStatusCode)(statusCode)) {
                    return response;
                }
                // Extra error information that we want to display if a particular response code is hit
                if (statusCode) {
                    customErrorInformation = customErrorMessages.get(statusCode);
                }
                isRetryable = (0, utils_1.isRetryableStatusCode)(statusCode);
                errorMessage = `Artifact service responded with ${statusCode}`;
            }
            catch (error) {
                isRetryable = true;
                errorMessage = error.message;
            }
            if (!isRetryable) {
                core.info(`${name} - Error is not retryable`);
                if (response) {
                    (0, utils_1.displayHttpDiagnostics)(response);
                }
                break;
            }
            core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
            yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt));
            attempt++;
        }
        if (response) {
            (0, utils_1.displayHttpDiagnostics)(response);
        }
        if (customErrorInformation) {
            throw Error(`${name} failed: ${customErrorInformation}`);
        }
        throw Error(`${name} failed: ${errorMessage}`);
    });
}
exports.retry = retry;
function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = (0, config_variables_1.getRetryLimit)()) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield retry(name, method, customErrorMessages, maxAttempts);
    });
}
exports.retryHttpClientRequest = retryHttpClientRequest;
//# sourceMappingURL=requestUtils.js.map
1
node_modules/@actions/artifact/lib/internal/requestUtils.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA4C,EAC5C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAmC,SAAS,CAAA;QACxD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,IAAA,2BAAmB,EAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,IAAA,6BAAqB,EAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,IAAA,aAAK,EAAC,IAAA,6CAAqC,EAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,IAAA,8BAAsB,EAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAAyC,EACzC,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,IAAA,gCAAa,GAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"}
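The customErrorMessages parameter in the removed retry helper above let callers attach a friendlier explanation to specific status codes, surfaced in the final failure message instead of the bare code. A hypothetical call shape, with a placeholder message (not a string from the removed code):

// Hypothetical caller of the removed retryHttpClientRequest signature.
const customErrorMessages = new Map<number, string>([
  [403, 'Storage quota reached; unable to upload new artifacts'] // placeholder text
])
// await retryHttpClientRequest('Upload chunk', () => client.sendStream(...), customErrorMessages)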
6
node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import { ArtifactServiceClientJSON } from '../../generated';
export declare function internalArtifactTwirpClient(options?: {
    maxAttempts?: number;
    retryIntervalMs?: number;
    retryMultiplier?: number;
}): ArtifactServiceClientJSON;
152
node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.internalArtifactTwirpClient = void 0;
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const core_1 = require("@actions/core");
const generated_1 = require("../../generated");
const config_1 = require("./config");
const user_agent_1 = require("./user-agent");
const errors_1 = require("./errors");
class ArtifactHttpClient {
    constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
        this.maxAttempts = 5;
        this.baseRetryIntervalMilliseconds = 3000;
        this.retryMultiplier = 1.5;
        const token = (0, config_1.getRuntimeToken)();
        this.baseUrl = (0, config_1.getResultsServiceUrl)();
        if (maxAttempts) {
            this.maxAttempts = maxAttempts;
        }
        if (baseRetryIntervalMilliseconds) {
            this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
        }
        if (retryMultiplier) {
            this.retryMultiplier = retryMultiplier;
        }
        this.httpClient = new http_client_1.HttpClient(userAgent, [
            new auth_1.BearerCredentialHandler(token)
        ]);
    }
    // This function satisfies the Rpc interface. It is compatible with the JSON
    // JSON generated client.
    request(service, method, contentType, data) {
        return __awaiter(this, void 0, void 0, function* () {
            const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
            (0, core_1.debug)(`[Request] ${method} ${url}`);
            const headers = {
                'Content-Type': contentType
            };
            try {
                const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
                return body;
            }
            catch (error) {
                throw new Error(`Failed to ${method}: ${error.message}`);
            }
        });
    }
    retryableRequest(operation) {
        return __awaiter(this, void 0, void 0, function* () {
            let attempt = 0;
            let errorMessage = '';
            let rawBody = '';
            while (attempt < this.maxAttempts) {
                let isRetryable = false;
                try {
                    const response = yield operation();
                    const statusCode = response.message.statusCode;
                    rawBody = yield response.readBody();
                    (0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
                    (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
                    const body = JSON.parse(rawBody);
                    (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
                    if (this.isSuccessStatusCode(statusCode)) {
                        return { response, body };
                    }
                    isRetryable = this.isRetryableHttpStatusCode(statusCode);
                    errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
                    if (body.msg) {
                        if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
                            throw new errors_1.UsageError();
                        }
                        errorMessage = `${errorMessage}: ${body.msg}`;
                    }
                }
                catch (error) {
                    if (error instanceof SyntaxError) {
                        (0, core_1.debug)(`Raw Body: ${rawBody}`);
                    }
                    if (error instanceof errors_1.UsageError) {
                        throw error;
                    }
                    if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
                        throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
                    }
                    isRetryable = true;
                    errorMessage = error.message;
                }
                if (!isRetryable) {
                    throw new Error(`Received non-retryable error: ${errorMessage}`);
                }
                if (attempt + 1 === this.maxAttempts) {
                    throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
                }
                const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
                (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`);
                yield this.sleep(retryTimeMilliseconds);
                attempt++;
            }
            throw new Error(`Request failed`);
        });
    }
    isSuccessStatusCode(statusCode) {
        if (!statusCode)
            return false;
        return statusCode >= 200 && statusCode < 300;
    }
    isRetryableHttpStatusCode(statusCode) {
        if (!statusCode)
            return false;
        const retryableStatusCodes = [
            http_client_1.HttpCodes.BadGateway,
            http_client_1.HttpCodes.GatewayTimeout,
            http_client_1.HttpCodes.InternalServerError,
            http_client_1.HttpCodes.ServiceUnavailable,
            http_client_1.HttpCodes.TooManyRequests
        ];
        return retryableStatusCodes.includes(statusCode);
    }
    sleep(milliseconds) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => setTimeout(resolve, milliseconds));
        });
    }
    getExponentialRetryTimeMilliseconds(attempt) {
        if (attempt < 0) {
            throw new Error('attempt should be a positive integer');
        }
        if (attempt === 0) {
            return this.baseRetryIntervalMilliseconds;
        }
        const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt);
        const maxTime = minTime * this.retryMultiplier;
        // returns a random number between minTime and maxTime (exclusive)
        return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
    }
}
function internalArtifactTwirpClient(options) {
    const client = new ArtifactHttpClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier);
    return new generated_1.ArtifactServiceClientJSON(client);
}
exports.internalArtifactTwirpClient = internalArtifactTwirpClient;
//# sourceMappingURL=artifact-twirp-client.js.map
1
node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"artifact-twirp-client.js","sourceRoot":"","sources":["../../../src/internal/shared/artifact-twirp-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,sDAA8E;AAC9E,wDAAqE;AACrE,wCAAyC;AACzC,+CAAyD;AACzD,qCAA8D;AAC9D,6CAA+C;AAC/C,qCAAiD;AAYjD,MAAM,kBAAkB;IAOtB,YACE,SAAiB,EACjB,WAAoB,EACpB,6BAAsC,EACtC,eAAwB;QARlB,gBAAW,GAAG,CAAC,CAAA;QACf,kCAA6B,GAAG,IAAI,CAAA;QACpC,oBAAe,GAAG,GAAG,CAAA;QAQ3B,MAAM,KAAK,GAAG,IAAA,wBAAe,GAAE,CAAA;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAA,6BAAoB,GAAE,CAAA;QACrC,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;SAC/B;QACD,IAAI,6BAA6B,EAAE;YACjC,IAAI,CAAC,6BAA6B,GAAG,6BAA6B,CAAA;SACnE;QACD,IAAI,eAAe,EAAE;YACnB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;SACvC;QAED,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,SAAS,EAAE;YAC1C,IAAI,8BAAuB,CAAC,KAAK,CAAC;SACnC,CAAC,CAAA;IACJ,CAAC;IAED,4EAA4E;IAC5E,yBAAyB;IACnB,OAAO,CACX,OAAe,EACf,MAAc,EACd,WAAwD,EACxD,IAAyB;;YAEzB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,OAAO,IAAI,MAAM,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAA;YACrE,IAAA,YAAK,EAAC,aAAa,MAAM,IAAI,GAAG,EAAE,CAAC,CAAA;YACnC,MAAM,OAAO,GAAG;gBACd,cAAc,EAAE,WAAW;aAC5B,CAAA;YACD,IAAI;gBACF,MAAM,EAAC,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAS,EAAE,gDACpD,OAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA,GAAA,CACzD,CAAA;gBAED,OAAO,IAAI,CAAA;aACZ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,aAAa,MAAM,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACzD;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,SAA4C;;YAE5C,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,YAAY,GAAG,EAAE,CAAA;YACrB,IAAI,OAAO,GAAG,EAAE,CAAA;YAChB,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,IAAI,WAAW,GAAG,KAAK,CAAA;gBAEvB,IAAI;oBACF,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;oBAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;oBAC9C,OAAO,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACnC,IAAA,YAAK,EAAC,gBAAgB,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;oBACpD,IAAA,YAAK,EAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBACtE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;oBAChC,IAAA,YAAK,EAAC,SAAS,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBAC/C,IAAI,IAAI,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;wBACxC,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAC,CAAA;qBACxB;oBACD,WAAW,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAA;oBACxD,YAAY,GAAG,oBAAoB,UAAU,KAAK,QAAQ,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;oBAClF,IAAI,IAAI,CAAC,GAAG,EAAE;wBACZ,IAAI,mBAAU,CAAC,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;4BAC5C,MAAM,IAAI,mBAAU,EAAE,CAAA;yBACvB;wBAED,YAAY,GAAG,GAAG,YAAY,KAAK,IAAI,CAAC,GAAG,EAAE,CAAA;qBAC9C;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,IAAI,KAAK,YAAY,WAAW,EAAE;wBAChC,IAAA,YAAK,EAAC,aAAa,OAAO,EAAE,CAAC,CAAA;qBAC9B;oBAED,IAAI,KAAK,YAAY,mBAAU,EAAE;wBAC/B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;wBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;qBACpC;oBAED,WAAW,GAAG,IAAI,CAAA;oBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;iBAC7B;gBAED,IAAI,CAAC,WAAW,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,YAAY,EAAE,CAAC,CAAA;iBACjE;gBAED,IAAI,OAAO,GAAG,CAAC,KAAK,IAAI,CAAC,WAAW,EAAE;oBACpC,MAAM,IAAI,KAAK,CACb,gCAAgC,IAAI,CAAC,WAAW,cAAc,YAAY,EAAE,CAC7E,CAAA;iBACF;gBAED,MAAM,qBAAqB,GACzB,IAAI,CAAC,mCAAmC,CAAC,OAAO,CAAC,CAAA;gBACnD,IAAA,WAAI,EACF,WAAW,OAAO,GAAG,CAAC,OACpB,IAAI,CAAC,WACP,uBAAuB,YAAY,yBAAyB,qBAAqB,QAAQ,CAC1F,CAAA;gBACD,MAAM,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAA;gBACvC,OAAO,EAAE,CAAA;aACV;YAED,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAA;QACnC,CAAC;KAAA;IAED,mBAAmB,CAAC,UAAmB;QACrC,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAC7B,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;IAC9C,CA
AC;IAED,yBAAyB,CAAC,UAAmB;QAC3C,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAE7B,MAAM,oBAAoB,GAAG;YAC3B,uBAAS,CAAC,UAAU;YACpB,uBAAS,CAAC,cAAc;YACxB,uBAAS,CAAC,mBAAmB;YAC7B,uBAAS,CAAC,kBAAkB;YAC5B,uBAAS,CAAC,eAAe;SAC1B,CAAA;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAClD,CAAC;IAEK,KAAK,CAAC,YAAoB;;YAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;QAClE,CAAC;KAAA;IAED,mCAAmC,CAAC,OAAe;QACjD,IAAI,OAAO,GAAG,CAAC,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SACxD;QAED,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,OAAO,IAAI,CAAC,6BAA6B,CAAA;SAC1C;QAED,MAAM,OAAO,GACX,IAAI,CAAC,6BAA6B,GAAG,SAAA,IAAI,CAAC,eAAe,EAAI,OAAO,CAAA,CAAA;QACtE,MAAM,OAAO,GAAG,OAAO,GAAG,IAAI,CAAC,eAAe,CAAA;QAE9C,kEAAkE;QAClE,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;IAClE,CAAC;CACF;AAED,SAAgB,2BAA2B,CAAC,OAI3C;IACC,MAAM,MAAM,GAAG,IAAI,kBAAkB,CACnC,IAAA,+BAAkB,GAAE,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,EACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,CACzB,CAAA;IACD,OAAO,IAAI,qCAAyB,CAAC,MAAM,CAAC,CAAA;AAC9C,CAAC;AAZD,kEAYC"}
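A note on the retry pacing implemented by getExponentialRetryTimeMilliseconds above: each attempt draws a random delay from a window whose lower bound grows geometrically with the attempt number. A standalone sketch of the same arithmetic follows; the base interval (3000 ms) and multiplier (1.5) are the client defaults suggested by the source map above, so treat them as illustrative rather than authoritative:

// Sketch of the jittered backoff window used by retryableRequest.
// Assumed defaults: baseRetryIntervalMilliseconds = 3000, retryMultiplier = 1.5.
function backoffWindowMs(attempt, base = 3000, multiplier = 1.5) {
    if (attempt === 0) {
        return [base, base]; // the first retry always waits exactly the base interval
    }
    const minTime = base * Math.pow(multiplier, attempt);
    const maxTime = minTime * multiplier;
    return [minTime, maxTime]; // the actual delay is a random value inside this window
}
// backoffWindowMs(1) -> [4500, 6750], backoffWindowMs(2) -> [6750, 10125], backoffWindowMs(3) -> [10125, 15187.5]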
7
node_modules/@actions/artifact/lib/internal/shared/config.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
export declare function getUploadChunkSize(): number;
export declare function getRuntimeToken(): string;
export declare function getResultsServiceUrl(): string;
export declare function isGhes(): boolean;
export declare function getGitHubWorkspaceDir(): string;
export declare function getConcurrency(): number;
export declare function getUploadChunkTimeout(): number;
63
node_modules/@actions/artifact/lib/internal/shared/config.js
generated
vendored
Normal file
@ -0,0 +1,63 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
|
||||
const os_1 = __importDefault(require("os"));
|
||||
// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is used during upload to blob storage
function getUploadChunkSize() {
return 8 * 1024 * 1024; // 8 MB Chunks
}
exports.getUploadChunkSize = getUploadChunkSize;
function getRuntimeToken() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'];
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
}
return token;
}
exports.getRuntimeToken = getRuntimeToken;
function getResultsServiceUrl() {
const resultsUrl = process.env['ACTIONS_RESULTS_URL'];
if (!resultsUrl) {
throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
}
return new URL(resultsUrl).origin;
}
exports.getResultsServiceUrl = getResultsServiceUrl;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
const ghWorkspaceDir = process.env['GITHUB_WORKSPACE'];
if (!ghWorkspaceDir) {
throw new Error('Unable to get the GITHUB_WORKSPACE env variable');
}
return ghWorkspaceDir;
}
exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
function getConcurrency() {
const numCPUs = os_1.default.cpus().length;
if (numCPUs <= 4) {
return 32;
}
const concurrency = 16 * numCPUs;
return concurrency > 300 ? 300 : concurrency;
}
exports.getConcurrency = getConcurrency;
function getUploadChunkTimeout() {
return 30000; // 30 seconds
}
exports.getUploadChunkTimeout = getUploadChunkTimeout;
//# sourceMappingURL=config.js.map
1
node_modules/@actions/artifact/lib/internal/shared/config.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../../src/internal/shared/config.ts"],"names":[],"mappings":";;;;;;AAAA,4CAAmB;AAEnB,iFAAiF;AACjF,qFAAqF;AACrF,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;KACxE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,oBAAoB;IAClC,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAA;KACtE;IAED,OAAO,IAAI,GAAG,CAAC,UAAU,CAAC,CAAC,MAAM,CAAA;AACnC,CAAC;AAPD,oDAOC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAC/C,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAA;IAEnD,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAA;AACpD,CAAC;AAXD,wBAWC;AAED,SAAgB,qBAAqB;IACnC,MAAM,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IACtD,IAAI,CAAC,cAAc,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAA;KACnE;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AAND,sDAMC;AAED,gHAAgH;AAChH,uFAAuF;AACvF,2HAA2H;AAC3H,SAAgB,cAAc;IAC5B,MAAM,OAAO,GAAG,YAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAA;IAEhC,IAAI,OAAO,IAAI,CAAC,EAAE;QAChB,OAAO,EAAE,CAAA;KACV;IAED,MAAM,WAAW,GAAG,EAAE,GAAG,OAAO,CAAA;IAChC,OAAO,WAAW,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAA;AAC9C,CAAC;AATD,wCASC;AAED,SAAgB,qBAAqB;IACnC,OAAO,KAAM,CAAA,CAAC,aAAa;AAC7B,CAAC;AAFD,sDAEC"}
22
node_modules/@actions/artifact/lib/internal/shared/errors.d.ts
generated
vendored
Normal file
@ -0,0 +1,22 @@
export declare class FilesNotFoundError extends Error {
files: string[];
constructor(files?: string[]);
}
export declare class InvalidResponseError extends Error {
constructor(message: string);
}
export declare class ArtifactNotFoundError extends Error {
constructor(message?: string);
}
export declare class GHESNotSupportedError extends Error {
constructor(message?: string);
}
export declare class NetworkError extends Error {
code: string;
constructor(code: string);
static isNetworkErrorCode: (code?: string) => boolean;
}
export declare class UsageError extends Error {
constructor();
static isUsageErrorMessage: (msg?: string) => boolean;
}
70
node_modules/@actions/artifact/lib/internal/shared/errors.js
generated
vendored
Normal file
@ -0,0 +1,70 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.ArtifactNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
|
||||
class FilesNotFoundError extends Error {
|
||||
constructor(files = []) {
|
||||
let message = 'No files were found to upload';
|
||||
if (files.length > 0) {
|
||||
message += `: ${files.join(', ')}`;
|
||||
}
|
||||
super(message);
|
||||
this.files = files;
|
||||
this.name = 'FilesNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.FilesNotFoundError = FilesNotFoundError;
|
||||
class InvalidResponseError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'InvalidResponseError';
|
||||
}
|
||||
}
|
||||
exports.InvalidResponseError = InvalidResponseError;
|
||||
class ArtifactNotFoundError extends Error {
|
||||
constructor(message = 'Artifact not found') {
|
||||
super(message);
|
||||
this.name = 'ArtifactNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.ArtifactNotFoundError = ArtifactNotFoundError;
|
||||
class GHESNotSupportedError extends Error {
|
||||
constructor(message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.') {
|
||||
super(message);
|
||||
this.name = 'GHESNotSupportedError';
|
||||
}
|
||||
}
|
||||
exports.GHESNotSupportedError = GHESNotSupportedError;
|
||||
class NetworkError extends Error {
|
||||
constructor(code) {
|
||||
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`;
|
||||
super(message);
|
||||
this.code = code;
|
||||
this.name = 'NetworkError';
|
||||
}
|
||||
}
|
||||
exports.NetworkError = NetworkError;
|
||||
NetworkError.isNetworkErrorCode = (code) => {
|
||||
if (!code)
|
||||
return false;
|
||||
return [
|
||||
'ECONNRESET',
|
||||
'ENOTFOUND',
|
||||
'ETIMEDOUT',
|
||||
'ECONNREFUSED',
|
||||
'EHOSTUNREACH'
|
||||
].includes(code);
|
||||
};
|
||||
class UsageError extends Error {
|
||||
constructor() {
|
||||
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
|
||||
super(message);
|
||||
this.name = 'UsageError';
|
||||
}
|
||||
}
|
||||
exports.UsageError = UsageError;
|
||||
UsageError.isUsageErrorMessage = (msg) => {
|
||||
if (!msg)
|
||||
return false;
|
||||
return msg.includes('insufficient usage');
|
||||
};
|
||||
//# sourceMappingURL=errors.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/shared/errors.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../../src/internal/shared/errors.ts"],"names":[],"mappings":";;;AAAA,MAAa,kBAAmB,SAAQ,KAAK;IAG3C,YAAY,QAAkB,EAAE;QAC9B,IAAI,OAAO,GAAG,+BAA+B,CAAA;QAC7C,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACpB,OAAO,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAA;SACnC;QAED,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AAbD,gDAaC;AAED,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAA;IACpC,CAAC;CACF;AALD,oDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YAAY,OAAO,GAAG,oBAAoB;QACxC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AALD,sDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YACE,OAAO,GAAG,+GAA+G;QAEzH,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AAPD,sDAOC;AAED,MAAa,YAAa,SAAQ,KAAK;IAGrC,YAAY,IAAY;QACtB,MAAM,OAAO,GAAG,2BAA2B,IAAI,kRAAkR,CAAA;QACjU,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAA;IAC5B,CAAC;;AARH,oCAoBC;AAVQ,+BAAkB,GAAG,CAAC,IAAa,EAAW,EAAE;IACrD,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACvB,OAAO;QACL,YAAY;QACZ,WAAW;QACX,WAAW;QACX,cAAc;QACd,cAAc;KACf,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAGH,MAAa,UAAW,SAAQ,KAAK;IACnC;QACE,MAAM,OAAO,GAAG,gSAAgS,CAAA;QAChT,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,YAAY,CAAA;IAC1B,CAAC;;AALH,gCAWC;AAJQ,8BAAmB,GAAG,CAAC,GAAY,EAAW,EAAE;IACrD,IAAI,CAAC,GAAG;QAAE,OAAO,KAAK,CAAA;IACtB,OAAO,GAAG,CAAC,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC3C,CAAC,CAAA"}
145
node_modules/@actions/artifact/lib/internal/shared/interfaces.d.ts
generated
vendored
Normal file
@ -0,0 +1,145 @@
/**
* Response from the server when an artifact is uploaded
*/
export interface UploadArtifactResponse {
/**
* Total size of the artifact in bytes. Not provided if no artifact was uploaded
*/
size?: number;
/**
* The id of the artifact that was created. Not provided if no artifact was uploaded
* This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
*/
id?: number;
}
/**
* Options for uploading an artifact
*/
export interface UploadArtifactOptions {
/**
* Duration after which artifact will expire in days.
*
* By default artifact expires after 90 days:
* https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
*
* Use this option to override the default expiry.
*
* Min value: 1
* Max value: 90 unless changed by repository setting
*
* If this is set to a greater value than the retention settings allowed, the retention on artifacts
* will be reduced to match the max value allowed on server, and the upload process will continue. An
* input of 0 assumes default retention setting.
*/
retentionDays?: number;
/**
* The level of compression for Zlib to be applied to the artifact archive.
* The value can range from 0 to 9:
* - 0: No compression
* - 1: Best speed
* - 6: Default compression (same as GNU Gzip)
* - 9: Best compression
* Higher levels will result in better compression, but will take longer to complete.
* For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
*/
compressionLevel?: number;
}
/**
* Response from the server when getting an artifact
*/
export interface GetArtifactResponse {
/**
* Metadata about the artifact that was found
*/
artifact: Artifact;
}
/**
* Options for listing artifacts
*/
export interface ListArtifactsOptions {
/**
* Filter the workflow run's artifacts to the latest by name
* In the case of reruns, this can be useful to avoid duplicates
*/
latest?: boolean;
}
/**
* Response from the server when listing artifacts
*/
export interface ListArtifactsResponse {
/**
* A list of artifacts that were found
*/
artifacts: Artifact[];
}
/**
* Response from the server when downloading an artifact
*/
export interface DownloadArtifactResponse {
/**
* The path where the artifact was downloaded to
*/
downloadPath?: string;
}
/**
* Options for downloading an artifact
*/
export interface DownloadArtifactOptions {
/**
* Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE
*/
path?: string;
}
/**
* An Actions Artifact
*/
export interface Artifact {
/**
* The name of the artifact
*/
name: string;
/**
* The ID of the artifact
*/
id: number;
/**
* The size of the artifact in bytes
*/
size: number;
/**
* The time when the artifact was created
*/
createdAt?: Date;
}
export interface FindOptions {
/**
* The criteria for finding Artifact(s) out of the scope of the current run.
*/
findBy?: {
/**
* Token with actions:read permissions
*/
token: string;
/**
* WorkflowRun of the artifact(s) to lookup
*/
workflowRunId: number;
/**
* Repository owner (eg. 'actions')
*/
repositoryOwner: string;
/**
* Repository name (eg. 'toolkit')
*/
repositoryName: string;
};
}
/**
* Response from the server when deleting an artifact
*/
export interface DeleteArtifactResponse {
/**
* The id of the artifact that was deleted
*/
id: number;
}
@ -1,3 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=contracts.js.map
//# sourceMappingURL=interfaces.js.map
1
node_modules/@actions/artifact/lib/internal/shared/interfaces.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../../src/internal/shared/interfaces.ts"],"names":[],"mappings":""}
4
node_modules/@actions/artifact/lib/internal/shared/user-agent.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
export declare function getUserAgentString(): string;
13
node_modules/@actions/artifact/lib/internal/shared/user-agent.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUserAgentString = void 0;
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
|
||||
const packageJson = require('../../../package.json');
|
||||
/**
|
||||
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
|
||||
*/
|
||||
function getUserAgentString() {
|
||||
return `@actions/artifact-${packageJson.version}`;
|
||||
}
|
||||
exports.getUserAgentString = getUserAgentString;
|
||||
//# sourceMappingURL=user-agent.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/shared/user-agent.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"user-agent.js","sourceRoot":"","sources":["../../../src/internal/shared/user-agent.ts"],"names":[],"mappings":";;;AAAA,qGAAqG;AACrG,MAAM,WAAW,GAAG,OAAO,CAAC,uBAAuB,CAAC,CAAA;AAEpD;;GAEG;AACH,SAAgB,kBAAkB;IAChC,OAAO,qBAAqB,WAAW,CAAC,OAAO,EAAE,CAAA;AACnD,CAAC;AAFD,gDAEC"}
5
node_modules/@actions/artifact/lib/internal/shared/util.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
export interface BackendIds {
workflowRunBackendId: string;
workflowJobRunBackendId: string;
}
export declare function getBackendIdsFromToken(): BackendIds;
81
node_modules/@actions/artifact/lib/internal/shared/util.js
generated
vendored
Normal file
@ -0,0 +1,81 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getBackendIdsFromToken = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const config_1 = require("./config");
|
||||
const jwt_decode_1 = __importDefault(require("jwt-decode"));
|
||||
const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
|
||||
// uses the JWT token claims to get the
|
||||
// workflow run and workflow job run backend ids
|
||||
function getBackendIdsFromToken() {
|
||||
const token = (0, config_1.getRuntimeToken)();
|
||||
const decoded = (0, jwt_decode_1.default)(token);
|
||||
if (!decoded.scp) {
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
/*
|
||||
* example decoded:
|
||||
* {
|
||||
* scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
|
||||
* }
|
||||
*/
|
||||
const scpParts = decoded.scp.split(' ');
|
||||
if (scpParts.length === 0) {
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
/*
|
||||
* example scpParts:
|
||||
* ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
|
||||
*/
|
||||
for (const scopes of scpParts) {
|
||||
const scopeParts = scopes.split(':');
|
||||
if ((scopeParts === null || scopeParts === void 0 ? void 0 : scopeParts[0]) !== 'Actions.Results') {
|
||||
// not the Actions.Results scope
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* example scopeParts:
|
||||
* ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
|
||||
*/
|
||||
if (scopeParts.length !== 3) {
|
||||
// missing expected number of claims
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
const ids = {
|
||||
workflowRunBackendId: scopeParts[1],
|
||||
workflowJobRunBackendId: scopeParts[2]
|
||||
};
|
||||
core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`);
|
||||
core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`);
|
||||
return ids;
|
||||
}
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
exports.getBackendIdsFromToken = getBackendIdsFromToken;
|
||||
//# sourceMappingURL=util.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/shared/util.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/internal/shared/util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,qCAAwC;AACxC,4DAAmC;AAWnC,MAAM,eAAe,GAAG,IAAI,KAAK,CAC/B,oFAAoF,CACrF,CAAA;AAED,uCAAuC;AACvC,gDAAgD;AAChD,SAAgB,sBAAsB;IACpC,MAAM,KAAK,GAAG,IAAA,wBAAe,GAAE,CAAA;IAC/B,MAAM,OAAO,GAAG,IAAA,oBAAU,EAAe,KAAK,CAAC,CAAA;IAC/C,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;QAChB,MAAM,eAAe,CAAA;KACtB;IAED;;;;;OAKG;IAEH,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACvC,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,MAAM,eAAe,CAAA;KACtB;IACD;;;OAGG;IAEH,KAAK,MAAM,MAAM,IAAI,QAAQ,EAAE;QAC7B,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACpC,IAAI,CAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAG,CAAC,CAAC,MAAK,iBAAiB,EAAE;YACzC,gCAAgC;YAChC,SAAQ;SACT;QAED;;;WAGG;QACH,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,oCAAoC;YACpC,MAAM,eAAe,CAAA;SACtB;QAED,MAAM,GAAG,GAAG;YACV,oBAAoB,EAAE,UAAU,CAAC,CAAC,CAAC;YACnC,uBAAuB,EAAE,UAAU,CAAC,CAAC,CAAC;SACvC,CAAA;QAED,IAAI,CAAC,KAAK,CAAC,4BAA4B,GAAG,CAAC,oBAAoB,EAAE,CAAC,CAAA;QAClE,IAAI,CAAC,KAAK,CAAC,gCAAgC,GAAG,CAAC,uBAAuB,EAAE,CAAC,CAAA;QAEzE,OAAO,GAAG,CAAA;KACX;IAED,MAAM,eAAe,CAAA;AACvB,CAAC;AAnDD,wDAmDC"}
21
node_modules/@actions/artifact/lib/internal/status-reporter.d.ts
generated
vendored
@ -1,21 +0,0 @@
/**
* Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded
*
* Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable
* The total status of the upload/download gets displayed according to this value
* If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function
*/
export declare class StatusReporter {
private totalNumberOfFilesToProcess;
private processedCount;
private displayFrequencyInMilliseconds;
private largeFiles;
private totalFileStatus;
constructor(displayFrequencyInMilliseconds: number);
setTotalNumberOfFilesToProcess(fileTotal: number): void;
start(): void;
updateLargeFileStatus(fileName: string, chunkStartIndex: number, chunkEndIndex: number, totalUploadFileSize: number): void;
stop(): void;
incrementProcessedCount(): void;
private formatPercentage;
}
52
node_modules/@actions/artifact/lib/internal/status-reporter.js
generated
vendored
@ -1,52 +0,0 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.StatusReporter = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
/**
|
||||
* Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded
|
||||
*
|
||||
* Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable
|
||||
* The total status of the upload/download gets displayed according to this value
|
||||
* If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function
|
||||
*/
|
||||
class StatusReporter {
|
||||
constructor(displayFrequencyInMilliseconds) {
|
||||
this.totalNumberOfFilesToProcess = 0;
|
||||
this.processedCount = 0;
|
||||
this.largeFiles = new Map();
|
||||
this.totalFileStatus = undefined;
|
||||
this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds;
|
||||
}
|
||||
setTotalNumberOfFilesToProcess(fileTotal) {
|
||||
this.totalNumberOfFilesToProcess = fileTotal;
|
||||
this.processedCount = 0;
|
||||
}
|
||||
start() {
|
||||
// displays information about the total upload/download status
|
||||
this.totalFileStatus = setInterval(() => {
|
||||
// display 1 decimal place without any rounding
|
||||
const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess);
|
||||
(0, core_1.info)(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`);
|
||||
}, this.displayFrequencyInMilliseconds);
|
||||
}
|
||||
// if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
|
||||
updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) {
|
||||
// display 1 decimal place without any rounding
|
||||
const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize);
|
||||
(0, core_1.info)(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`);
|
||||
}
|
||||
stop() {
|
||||
if (this.totalFileStatus) {
|
||||
clearInterval(this.totalFileStatus);
|
||||
}
|
||||
}
|
||||
incrementProcessedCount() {
|
||||
this.processedCount++;
|
||||
}
|
||||
formatPercentage(numerator, denominator) {
|
||||
// toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
|
||||
return ((numerator / denominator) * 100).toFixed(4).toString();
|
||||
}
|
||||
}
|
||||
exports.StatusReporter = StatusReporter;
|
||||
//# sourceMappingURL=status-reporter.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/status-reporter.js.map
generated
vendored
@ -1 +0,0 @@
{"version":3,"file":"status-reporter.js","sourceRoot":"","sources":["../../src/internal/status-reporter.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;GAMG;AAEH,MAAa,cAAc;IAOzB,YAAY,8BAAsC;QAN1C,gCAA2B,GAAG,CAAC,CAAA;QAC/B,mBAAc,GAAG,CAAC,CAAA;QAElB,eAAU,GAAG,IAAI,GAAG,EAAkB,CAAA;QAI5C,IAAI,CAAC,eAAe,GAAG,SAAS,CAAA;QAChC,IAAI,CAAC,8BAA8B,GAAG,8BAA8B,CAAA;IACtE,CAAC;IAED,8BAA8B,CAAC,SAAiB;QAC9C,IAAI,CAAC,2BAA2B,GAAG,SAAS,CAAA;QAC5C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;IACzB,CAAC;IAED,KAAK;QACH,8DAA8D;QAC9D,IAAI,CAAC,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;YACtC,+CAA+C;YAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CACtC,IAAI,CAAC,cAAc,EACnB,IAAI,CAAC,2BAA2B,CACjC,CAAA;YACD,IAAA,WAAI,EACF,qBACE,IAAI,CAAC,2BACP,yBAAyB,IAAI,CAAC,cAAc,KAAK,UAAU,CAAC,KAAK,CAC/D,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,IAAI,CACN,CAAA;QACH,CAAC,EAAE,IAAI,CAAC,8BAA8B,CAAC,CAAA;IACzC,CAAC;IAED,sIAAsI;IACtI,qBAAqB,CACnB,QAAgB,EAChB,eAAuB,EACvB,aAAqB,EACrB,mBAA2B;QAE3B,+CAA+C;QAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAA;QAC5E,IAAA,WAAI,EACF,YAAY,QAAQ,KAAK,UAAU,CAAC,KAAK,CACvC,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,YAAY,eAAe,IAAI,aAAa,EAAE,CAChD,CAAA;IACH,CAAC;IAED,IAAI;QACF,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;SACpC;IACH,CAAC;IAED,uBAAuB;QACrB,IAAI,CAAC,cAAc,EAAE,CAAA;IACvB,CAAC;IAEO,gBAAgB,CAAC,SAAiB,EAAE,WAAmB;QAC7D,0HAA0H;QAC1H,OAAO,CAAC,CAAC,SAAS,GAAG,WAAW,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;IAChE,CAAC;CACF;AAnED,wCAmEC"}
14
node_modules/@actions/artifact/lib/internal/upload-gzip.d.ts
generated
vendored
@ -1,14 +0,0 @@
/// <reference types="node" />
|
||||
/**
|
||||
* Creates a Gzip compressed file of an original file at the provided temporary filepath location
|
||||
* @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
|
||||
* @param {string} tempFilePath the location of where the Gzip file will be created
|
||||
* @returns the size of gzip file that gets created
|
||||
*/
|
||||
export declare function createGZipFileOnDisk(originalFilePath: string, tempFilePath: string): Promise<number>;
|
||||
/**
|
||||
* Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
|
||||
* @param originalFilePath the path to the original file that is being GZipped
|
||||
* @returns a buffer with the GZip file
|
||||
*/
|
||||
export declare function createGZipFileInBuffer(originalFilePath: string): Promise<Buffer>;
|
||||
147
node_modules/@actions/artifact/lib/internal/upload-gzip.js
generated
vendored
@ -1,147 +0,0 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const zlib = __importStar(require("zlib"));
|
||||
const util_1 = require("util");
|
||||
const stat = (0, util_1.promisify)(fs.stat);
|
||||
/**
|
||||
* GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip
|
||||
* files then will just waste a lot of time before ultimately being discarded (especially for very large files).
|
||||
* If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is
|
||||
*/
|
||||
const gzipExemptFileExtensions = [
|
||||
'.gz',
|
||||
'.gzip',
|
||||
'.tgz',
|
||||
'.taz',
|
||||
'.Z',
|
||||
'.taZ',
|
||||
'.bz2',
|
||||
'.tbz',
|
||||
'.tbz2',
|
||||
'.tz2',
|
||||
'.lz',
|
||||
'.lzma',
|
||||
'.tlz',
|
||||
'.lzo',
|
||||
'.xz',
|
||||
'.txz',
|
||||
'.zst',
|
||||
'.zstd',
|
||||
'.tzst',
|
||||
'.zip',
|
||||
'.7z' // 7ZIP
|
||||
];
|
||||
/**
|
||||
* Creates a Gzip compressed file of an original file at the provided temporary filepath location
|
||||
* @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
|
||||
* @param {string} tempFilePath the location of where the Gzip file will be created
|
||||
* @returns the size of gzip file that gets created
|
||||
*/
|
||||
function createGZipFileOnDisk(originalFilePath, tempFilePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
for (const gzipExemptExtension of gzipExemptFileExtensions) {
|
||||
if (originalFilePath.endsWith(gzipExemptExtension)) {
|
||||
// return a really large number so that the original file gets uploaded
|
||||
return Number.MAX_SAFE_INTEGER;
|
||||
}
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
const inputStream = fs.createReadStream(originalFilePath);
|
||||
const gzip = zlib.createGzip();
|
||||
const outputStream = fs.createWriteStream(tempFilePath);
|
||||
inputStream.pipe(gzip).pipe(outputStream);
|
||||
outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () {
|
||||
// wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
|
||||
const size = (yield stat(tempFilePath)).size;
|
||||
resolve(size);
|
||||
}));
|
||||
outputStream.on('error', error => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(error);
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.createGZipFileOnDisk = createGZipFileOnDisk;
|
||||
/**
|
||||
* Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
|
||||
* @param originalFilePath the path to the original file that is being GZipped
|
||||
* @returns a buffer with the GZip file
|
||||
*/
|
||||
function createGZipFileInBuffer(originalFilePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
|
||||
var _a, e_1, _b, _c;
|
||||
const inputStream = fs.createReadStream(originalFilePath);
|
||||
const gzip = zlib.createGzip();
|
||||
inputStream.pipe(gzip);
|
||||
// read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
|
||||
const chunks = [];
|
||||
try {
|
||||
for (var _d = true, gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), _a = gzip_1_1.done, !_a;) {
|
||||
_c = gzip_1_1.value;
|
||||
_d = false;
|
||||
try {
|
||||
const chunk = _c;
|
||||
chunks.push(chunk);
|
||||
}
|
||||
finally {
|
||||
_d = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (!_d && !_a && (_b = gzip_1.return)) yield _b.call(gzip_1);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
resolve(Buffer.concat(chunks));
|
||||
}));
|
||||
});
|
||||
}
|
||||
exports.createGZipFileInBuffer = createGZipFileInBuffer;
|
||||
//# sourceMappingURL=upload-gzip.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/upload-gzip.js.map
generated
vendored
@ -1 +0,0 @@
{"version":3,"file":"upload-gzip.js","sourceRoot":"","sources":["../../src/internal/upload-gzip.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,+BAA8B;AAC9B,MAAM,IAAI,GAAG,IAAA,gBAAS,EAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B;;;;GAIG;AACH,MAAM,wBAAwB,GAAG;IAC/B,KAAK;IACL,OAAO;IACP,MAAM;IACN,MAAM;IACN,IAAI;IACJ,MAAM;IACN,MAAM;IACN,MAAM;IACN,OAAO;IACP,MAAM;IACN,KAAK;IACL,OAAO;IACP,MAAM;IACN,MAAM;IACN,KAAK;IACL,MAAM;IACN,MAAM;IACN,OAAO;IACP,OAAO;IACP,MAAM;IACN,KAAK,CAAC,OAAO;CACd,CAAA;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,gBAAwB,EACxB,YAAoB;;QAEpB,KAAK,MAAM,mBAAmB,IAAI,wBAAwB,EAAE;YAC1D,IAAI,gBAAgB,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;gBAClD,uEAAuE;gBACvE,OAAO,MAAM,CAAC,gBAAgB,CAAA;aAC/B;SACF;QAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,MAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;YACvD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;YACzC,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAS,EAAE;gBACnC,qIAAqI;gBACrI,MAAM,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAA;gBAC5C,OAAO,CAAC,IAAI,CAAC,CAAA;YACf,CAAC,CAAA,CAAC,CAAA;YACF,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBAC/B,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;gBAClB,MAAM,CAAC,KAAK,CAAC,CAAA;YACf,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC;CAAA;AA3BD,oDA2BC;AAED;;;;GAIG;AACH,SAAsB,sBAAsB,CAC1C,gBAAwB;;QAExB,OAAO,IAAI,OAAO,CAAC,CAAM,OAAO,EAAC,EAAE;;YACjC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;YACtB,8IAA8I;YAC9I,MAAM,MAAM,GAAiB,EAAE,CAAA;;gBAC/B,KAA0B,eAAA,SAAA,cAAA,IAAI,CAAA,UAAA;oBAAJ,oBAAI;oBAAJ,WAAI;;wBAAnB,MAAM,KAAK,KAAA,CAAA;wBACpB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;;;;;iBACnB;;;;;;;;;YACD,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;QAChC,CAAC,CAAA,CAAC,CAAA;IACJ,CAAC;CAAA;AAdD,wDAcC"}
48
node_modules/@actions/artifact/lib/internal/upload-http-client.d.ts
generated
vendored
@ -1,48 +0,0 @@
import { ArtifactResponse, UploadResults } from './contracts';
import { UploadSpecification } from './upload-specification';
import { UploadOptions } from './upload-options';
export declare class UploadHttpClient {
private uploadHttpManager;
private statusReporter;
constructor();
/**
* Creates a file container for the new artifact in the remote blob storage/file service
* @param {string} artifactName Name of the artifact being created
* @returns The response from the Artifact Service if the file container was successfully created
*/
createArtifactInFileContainer(artifactName: string, options?: UploadOptions | undefined): Promise<ArtifactResponse>;
/**
* Concurrently upload all of the files in chunks
* @param {string} uploadUrl Base Url for the artifact that was created
* @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
* @returns The size of all the files uploaded in bytes
*/
uploadArtifactToFileContainer(uploadUrl: string, filesToUpload: UploadSpecification[], options?: UploadOptions): Promise<UploadResults>;
/**
* Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
* If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
* @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
* @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
* @returns The size of the file that was uploaded in bytes along with any failed uploads
*/
private uploadFileAsync;
/**
* Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
* indicates a retryable status, we try to upload the chunk as well
* @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
* @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
* @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
* @param {number} start Starting byte index of file that the chunk belongs to
* @param {number} end Ending byte index of file that the chunk belongs to
* @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
* @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
* @param {number} totalFileSize Original total size of the file that is being uploaded
* @returns if the chunk was successfully uploaded
*/
private uploadChunk;
/**
* Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
* Updating the size indicates that we are done uploading all the contents of the artifact
*/
patchArtifactSize(size: number, artifactName: string): Promise<void>;
}
415
node_modules/@actions/artifact/lib/internal/upload-http-client.js
generated
vendored
@ -1,415 +0,0 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UploadHttpClient = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const tmp = __importStar(require("tmp-promise"));
|
||||
const stream = __importStar(require("stream"));
|
||||
const utils_1 = require("./utils");
|
||||
const config_variables_1 = require("./config-variables");
|
||||
const util_1 = require("util");
|
||||
const url_1 = require("url");
|
||||
const perf_hooks_1 = require("perf_hooks");
|
||||
const status_reporter_1 = require("./status-reporter");
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const http_manager_1 = require("./http-manager");
|
||||
const upload_gzip_1 = require("./upload-gzip");
|
||||
const requestUtils_1 = require("./requestUtils");
|
||||
const stat = (0, util_1.promisify)(fs.stat);
|
||||
class UploadHttpClient {
|
||||
constructor() {
|
||||
this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), '@actions/artifact-upload');
|
||||
this.statusReporter = new status_reporter_1.StatusReporter(10000);
|
||||
}
|
||||
/**
|
||||
* Creates a file container for the new artifact in the remote blob storage/file service
|
||||
* @param {string} artifactName Name of the artifact being created
|
||||
* @returns The response from the Artifact Service if the file container was successfully created
|
||||
*/
|
||||
createArtifactInFileContainer(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const parameters = {
|
||||
Type: 'actions_storage',
|
||||
Name: artifactName
|
||||
};
|
||||
// calculate retention period
|
||||
if (options && options.retentionDays) {
|
||||
const maxRetentionStr = (0, config_variables_1.getRetentionDays)();
|
||||
parameters.RetentionDays = (0, utils_1.getProperRetention)(options.retentionDays, maxRetentionStr);
|
||||
}
|
||||
const data = JSON.stringify(parameters, null, 2);
|
||||
const artifactUrl = (0, utils_1.getArtifactUrl)();
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.uploadHttpManager.getClient(0);
|
||||
const headers = (0, utils_1.getUploadHeaders)('application/json', false);
|
||||
// Extra information to display when a particular HTTP code is returned
|
||||
// If a 403 is returned when trying to create a file container, the customer has exceeded
|
||||
// their storage quota so no new artifact containers can be created
|
||||
const customErrorMessages = new Map([
|
||||
[
|
||||
http_client_1.HttpCodes.Forbidden,
|
||||
(0, config_variables_1.isGhes)()
|
||||
? 'Please reference [Enabling GitHub Actions for GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.8/admin/github-actions/enabling-github-actions-for-github-enterprise-server) to ensure Actions storage is configured correctly.'
|
||||
: 'Artifact storage quota has been hit. Unable to upload any new artifacts'
|
||||
],
|
||||
[
|
||||
http_client_1.HttpCodes.BadRequest,
|
||||
`The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}`
|
||||
]
|
||||
]);
|
||||
const response = yield (0, requestUtils_1.retryHttpClientRequest)('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages);
|
||||
const body = yield response.readBody();
|
||||
return JSON.parse(body);
|
||||
});
|
||||
}
|
||||
    /**
     * Concurrently upload all of the files in chunks
     * @param {string} uploadUrl Base URL for the artifact that was created
     * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
     * @returns The size of all the files uploaded in bytes
     */
    uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)();
            const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)();
            core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
            const parameters = [];
            // by default, file uploads will continue if there is an error unless specified differently in the options
            let continueOnError = true;
            if (options) {
                if (options.continueOnError === false) {
                    continueOnError = false;
                }
            }
            // prepare the necessary parameters to upload all the files
            for (const file of filesToUpload) {
                const resourceUrl = new url_1.URL(uploadUrl);
                resourceUrl.searchParams.append('itemPath', file.uploadFilePath);
                parameters.push({
                    file: file.absoluteFilePath,
                    resourceUrl: resourceUrl.toString(),
                    maxChunkSize: MAX_CHUNK_SIZE,
                    continueOnError
                });
            }
            const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()];
            const failedItemsToReport = [];
            let currentFile = 0;
            let completedFiles = 0;
            let uploadFileSize = 0;
            let totalFileSize = 0;
            let abortPendingFileUploads = false;
            this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length);
            this.statusReporter.start();
            // only allow a certain number of files to be uploaded at once, this is done to reduce potential errors
            yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () {
                while (currentFile < filesToUpload.length) {
                    const currentFileParameters = parameters[currentFile];
                    currentFile += 1;
                    if (abortPendingFileUploads) {
                        failedItemsToReport.push(currentFileParameters.file);
                        continue;
                    }
                    const startTime = perf_hooks_1.performance.now();
                    const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters);
                    if (core.isDebug()) {
                        core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
                    }
                    uploadFileSize += uploadFileResult.successfulUploadSize;
                    totalFileSize += uploadFileResult.totalSize;
                    if (uploadFileResult.isSuccess === false) {
                        failedItemsToReport.push(currentFileParameters.file);
                        if (!continueOnError) {
                            // fail fast
                            core.error(`aborting artifact upload`);
                            abortPendingFileUploads = true;
                        }
                    }
                    this.statusReporter.incrementProcessedCount();
                }
            })));
            this.statusReporter.stop();
            // done uploading, safely dispose all connections
            this.uploadHttpManager.disposeAndReplaceAllClients();
            core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
            return {
                uploadSize: uploadFileSize,
                totalSize: totalFileSize,
                failedItems: failedItemsToReport
            };
        });
    }
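Note the concurrency pattern just above: FILE_CONCURRENCY async workers share one currentFile cursor, so at most FILE_CONCURRENCY uploads are ever in flight, and claiming an index is safe because `currentFile += 1` runs synchronously before the first await in each iteration. A standalone sketch of the same bounded worker pool (CONCURRENCY, items, and processItem are hypothetical names for this illustration):

const CONCURRENCY = 2;
const items = ['a.txt', 'b.txt', 'c.txt', 'd.txt'];
let cursor = 0;
// stand-in for the real per-file upload work
async function processItem(item) {
    return new Promise(resolve => setTimeout(() => resolve(item.length), 10));
}
async function runPool() {
    const workers = [...new Array(CONCURRENCY).keys()].map(async () => {
        // each worker claims the next unprocessed index until the list is exhausted
        while (cursor < items.length) {
            const item = items[cursor++];
            await processItem(item);
        }
    });
    await Promise.all(workers);
}
runPool();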
    /**
     * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
     * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
     * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
     * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
     * @returns The size of the file that was uploaded in bytes along with any failed uploads
     */
    uploadFileAsync(httpClientIndex, parameters) {
        return __awaiter(this, void 0, void 0, function* () {
            const fileStat = yield stat(parameters.file);
            const totalFileSize = fileStat.size;
            const isFIFO = fileStat.isFIFO();
            let offset = 0;
            let isUploadSuccessful = true;
            let failedChunkSizes = 0;
            let uploadFileSize = 0;
            let isGzip = true;
            // if the file being uploaded is less than 64k in size, an in-memory buffer is used for GZip compression
            // to increase throughput and to minimize disk I/O
            // with named pipes the file size is reported as zero; in that case, don't read the file into memory
            if (!isFIFO && totalFileSize < 65536) {
                core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
                const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file);
                // An open stream is needed in the event of a failure so that we can retry. If a NodeJS.ReadableStream is directly passed in,
                // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
                let openUploadStream;
                if (totalFileSize < buffer.byteLength) {
                    // compression did not help with reducing the size, use a readable stream from the original file for upload
                    core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
                    openUploadStream = () => fs.createReadStream(parameters.file);
                    isGzip = false;
                    uploadFileSize = totalFileSize;
                }
                else {
                    // create a readable stream using a PassThrough stream that is both readable and writable
                    core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
                    openUploadStream = () => {
                        const passThrough = new stream.PassThrough();
                        passThrough.end(buffer);
                        return passThrough;
                    };
                    uploadFileSize = buffer.byteLength;
                }
                const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
                if (!result) {
                    // chunk failed to upload
                    isUploadSuccessful = false;
                    failedChunkSizes += uploadFileSize;
                    core.warning(`Aborting upload for ${parameters.file} due to failure`);
                }
                return {
                    isSuccess: isUploadSuccessful,
                    successfulUploadSize: uploadFileSize - failedChunkSizes,
                    totalSize: totalFileSize
                };
            }
            else {
                // the file being uploaded is greater than 64k in size: a temporary file is created on disk using the
                // npm tmp-promise package and is used to create the GZipped file
                const tempFile = yield tmp.file();
                core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`);
                // create a GZip file of the original file being uploaded, the original file should not be modified in any way
                uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path);
                let uploadFilePath = tempFile.path;
                // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
                // for named pipes totalFileSize is zero, so this path assumes compression did help
                if (!isFIFO && totalFileSize < uploadFileSize) {
                    core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
                    uploadFileSize = totalFileSize;
                    uploadFilePath = parameters.file;
                    isGzip = false;
                }
                else {
                    core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`);
                }
                let abortFileUpload = false;
                // upload only a single chunk at a time
                while (offset < uploadFileSize) {
                    const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
                    const startChunkIndex = offset;
                    const endChunkIndex = offset + chunkSize - 1;
                    offset += parameters.maxChunkSize;
                    if (abortFileUpload) {
                        // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
                        failedChunkSizes += chunkSize;
                        continue;
                    }
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
                        start: startChunkIndex,
                        end: endChunkIndex,
                        autoClose: false
                    }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize);
                    if (!result) {
                        // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
                        // successfully uploaded so the server may report a different size for what was uploaded
                        isUploadSuccessful = false;
                        failedChunkSizes += chunkSize;
                        core.warning(`Aborting upload for ${parameters.file} due to failure`);
                        abortFileUpload = true;
                    }
                    else {
                        // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
                        if (uploadFileSize > 8388608) {
                            this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize);
                        }
                    }
                }
                // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
                // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
                core.debug(`deleting temporary gzip file ${tempFile.path}`);
                yield tempFile.cleanup();
                return {
                    isSuccess: isUploadSuccessful,
                    successfulUploadSize: uploadFileSize - failedChunkSizes,
                    totalSize: totalFileSize
                };
            }
        });
    }
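The small-file branch above hinges on one idea: compress into memory first, keep whichever representation is smaller, and expose the winner as a factory that re-opens a fresh stream for every retry. A simplified sketch of that decision using Node's built-in zlib (chooseUploadStream is a hypothetical helper; the vendored code compresses via upload_gzip rather than gzipSync):

const fs = require('fs');
const stream = require('stream');
const zlib = require('zlib');

function chooseUploadStream(filePath, originalSize) {
    const gzipped = zlib.gzipSync(fs.readFileSync(filePath));
    if (gzipped.byteLength > originalSize) {
        // compression made the payload bigger; re-open the original file on every attempt
        return { isGzip: false, size: originalSize, open: () => fs.createReadStream(filePath) };
    }
    // a fresh PassThrough per attempt replays the in-memory gzip buffer from byte zero,
    // which is what makes chunk retries possible
    return {
        isGzip: true,
        size: gzipped.byteLength,
        open: () => {
            const passThrough = new stream.PassThrough();
            passThrough.end(gzipped);
            return passThrough;
        }
    };
}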
    /**
     * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
     * indicates a retryable status, the chunk upload is retried
     * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
     * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
     * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
     * @param {number} start Starting byte index of file that the chunk belongs to
     * @param {number} end Ending byte index of file that the chunk belongs to
     * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
     * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
     * @param {number} totalFileSize Original total size of the file that is being uploaded
     * @returns whether the chunk was successfully uploaded
     */
    uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
        return __awaiter(this, void 0, void 0, function* () {
            // open a new stream and read it to compute the digest
            const digest = yield (0, utils_1.digestForStream)(openStream());
            // prepare all the necessary headers before making any http call
            const headers = (0, utils_1.getUploadHeaders)('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, (0, utils_1.getContentRange)(start, end, uploadFileSize), digest);
            const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.uploadHttpManager.getClient(httpClientIndex);
                return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
            });
            let retryCount = 0;
            const retryLimit = (0, config_variables_1.getRetryLimit)();
            // Increments the current retry count and then checks if the retry limit has been reached
            // If there have been too many retries, fail so the upload stops
            const incrementAndCheckRetryLimit = (response) => {
                retryCount++;
                if (retryCount > retryLimit) {
                    if (response) {
                        (0, utils_1.displayHttpDiagnostics)(response);
                    }
                    core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`);
                    return true;
                }
                return false;
            };
            const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
                this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
                if (retryAfterValue) {
                    core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`);
                    yield (0, utils_1.sleep)(retryAfterValue);
                }
                else {
                    const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount);
                    core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`);
                    yield (0, utils_1.sleep)(backoffTime);
                }
                core.info(`Finished backoff for retry #${retryCount}, continuing with upload`);
                return;
            });
            // allow for failed chunks to be retried multiple times
            while (retryCount <= retryLimit) {
                let response;
                try {
                    response = yield uploadChunkRequest();
                }
                catch (error) {
                    // if an error is caught, it is usually indicative of a timeout so retry the upload
                    core.info(`An error has been caught for http-client index ${httpClientIndex}, retrying the upload`);
                    // eslint-disable-next-line no-console
                    console.log(error);
                    if (incrementAndCheckRetryLimit()) {
                        return false;
                    }
                    yield backOff();
                    continue;
                }
                // Always read the body of the response. There is potential for a resource leak if the body is not read, which will
                // result in the connection remaining open along with unintended consequences when trying to dispose of the client
                yield response.readBody();
                if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) {
                    return true;
                }
                else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) {
                    core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`);
                    if (incrementAndCheckRetryLimit(response)) {
                        return false;
                    }
                    (0, utils_1.isThrottledStatusCode)(response.message.statusCode)
                        ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers))
                        : yield backOff();
                }
                else {
                    core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`);
                    (0, utils_1.displayHttpDiagnostics)(response);
                    return false;
                }
            }
            return false;
        });
    }
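Stripped of the HTTP plumbing, uploadChunk is a bounded retry loop: send, classify the result as success / retryable / fatal, and back off exponentially (or for the server-suggested Retry-After duration) between attempts. A compact sketch of that policy, assuming a hypothetical sendChunk helper and hard-coded delays (the vendored code reads its limit and delays from config-variables and utils):

async function uploadWithRetry(sendChunk, retryLimit = 5, baseDelayMs = 1000) {
    for (let retryCount = 0; retryCount <= retryLimit; retryCount++) {
        try {
            const response = await sendChunk();
            if (response.ok) {
                return true;
            }
            if (!isRetryable(response.status)) {
                return false; // non-retryable failure, give up immediately
            }
        }
        catch (error) {
            // network errors (commonly timeouts) fall through to the backoff below
        }
        // exponential backoff: 1s, 2s, 4s, ... before the next attempt
        await new Promise(resolve => setTimeout(resolve, baseDelayMs * Math.pow(2, retryCount)));
    }
    return false; // retry limit exceeded
}

function isRetryable(status) {
    return [408, 429, 500, 502, 503, 504].indexOf(status) !== -1;
}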
    /**
     * Updates the size of the artifact from the -1 placeholder that was set when the container for the artifact was first created.
     * Updating the size indicates that we are done uploading all the contents of the artifact
     */
    patchArtifactSize(size, artifactName) {
        return __awaiter(this, void 0, void 0, function* () {
            const resourceUrl = new url_1.URL((0, utils_1.getArtifactUrl)());
            resourceUrl.searchParams.append('artifactName', artifactName);
            const parameters = { Size: size };
            const data = JSON.stringify(parameters, null, 2);
            core.debug(`URL is ${resourceUrl.toString()}`);
            // use the first client from the httpManager; `keep-alive` is not used, so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const headers = (0, utils_1.getUploadHeaders)('application/json', false);
            // Extra information to display when a particular HTTP code is returned
            const customErrorMessages = new Map([
                [
                    http_client_1.HttpCodes.NotFound,
                    `An Artifact with the name ${artifactName} was not found`
                ]
            ]);
            // TODO retry for all possible response codes; the artifact upload is essentially complete at this point, so we should try to finish it at all costs
            const response = yield (0, requestUtils_1.retryHttpClientRequest)('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages);
            yield response.readBody();
            core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
        });
    }
}
exports.UploadHttpClient = UploadHttpClient;
//# sourceMappingURL=upload-http-client.js.map
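Read together, the three public steps of this client form the whole upload lifecycle: reserve a file container, stream each file into it in chunks, then PATCH the artifact size to finalize. A hypothetical orchestration of the methods above (argument shapes abbreviated; the real entry point in the artifact client adds validation and upload-specification handling):

async function uploadArtifactEndToEnd(uploadHttpClient, artifactName, filesToUpload) {
    // 1. reserve a container; the response carries fileContainerResourceUrl
    const container = await uploadHttpClient.createArtifactInFileContainer(artifactName, {});
    // 2. stream every file into the container with bounded concurrency
    const result = await uploadHttpClient.uploadArtifactToFileContainer(container.fileContainerResourceUrl, filesToUpload, { continueOnError: true });
    // 3. finalize: replace the -1 size placeholder with the bytes actually uploaded
    await uploadHttpClient.patchArtifactSize(result.uploadSize, artifactName);
    return result;
}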
1
node_modules/@actions/artifact/lib/internal/upload-http-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
34
node_modules/@actions/artifact/lib/internal/upload-options.d.ts
generated
vendored
@ -1,34 +0,0 @@
export interface UploadOptions {
    /**
     * Indicates if the artifact upload should continue if a file or chunk fails to upload from any error.
     * If there is an error during upload, a partial artifact will always be associated and available for
     * download at the end. The size reported will be the amount of storage that the user or org will be
     * charged for the partial artifact. Defaults to true if not specified
     *
     * If set to false, and an error is encountered, all other uploads will stop and any files or chunks
     * that were queued will not be attempted. The partial artifact available will only
     * include files and chunks up until the failure
     *
     * If set to true and an error is encountered, the failed file will be skipped and ignored and all
     * other queued files will still be attempted. The partial artifact at the end will have all
     * files with the exception of the problematic file(s)/chunk(s) that failed to upload
     *
     */
    continueOnError?: boolean;
    /**
     * Duration, in days, after which the artifact will expire.
     *
     * By default an artifact expires after 90 days:
     * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
     *
     * Use this option to override the default expiry.
     *
     * Min value: 1
     * Max value: 90 unless changed by repository setting
     *
     * If this is set to a greater value than the retention settings allow, the retention on artifacts
     * will be reduced to match the max value allowed on the server, and the upload process will continue. An
     * input of 0 assumes the default retention setting.
     */
    retentionDays?: number;
}
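For reference, these options are passed straight through uploadArtifact on the v1 client; a short usage sketch (artifact name and file paths are placeholders):

const artifact = require('@actions/artifact');

async function run() {
    const client = artifact.create();
    const options = {
        continueOnError: false, // stop queueing further files after the first failure
        retentionDays: 10 // expire earlier than the 90-day default
    };
    const result = await client.uploadArtifact('my-artifact', ['out/report.txt'], 'out', options);
    console.log(`uploaded ${result.size} bytes, ${result.failedItems.length} failed item(s)`);
}

run();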
3
node_modules/@actions/artifact/lib/internal/upload-options.js
generated
vendored
@ -1,3 +0,0 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=upload-options.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/upload-options.js.map
generated
vendored
@ -1 +0,0 @@
{"version":3,"file":"upload-options.js","sourceRoot":"","sources":["../../src/internal/upload-options.ts"],"names":[],"mappings":""}
|
||||
Some files were not shown because too many files have changed in this diff