Bump artifact dependencies if CODEQL_ACTION_ARTIFACT_V2_UPGRADE enabled (#2482)
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
Co-authored-by: Henry Mercer <henrymercer@github.com>
parent cf5b0a9041
commit a196a714b8
5388 changed files with 2176737 additions and 71701 deletions
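The flag named in the commit title gates the switch to the newer artifact dependencies. A minimal sketch of how such a gate could look, assuming the flag is read straight from the environment; the helper names below are hypothetical, and the real action routes this decision through its own feature-flag machinery:

import { DefaultArtifactClient } from "@actions/artifact";

// Hypothetical helper: true when the v2 artifact upgrade is enabled.
function artifactV2UpgradeEnabled(): boolean {
  return process.env.CODEQL_ACTION_ARTIFACT_V2_UPGRADE === "true";
}

// Only construct the v2 client when the flag is on; otherwise callers keep
// using the previously vendored artifact library.
export function maybeGetV2ArtifactClient(): DefaultArtifactClient | undefined {
  return artifactV2UpgradeEnabled() ? new DefaultArtifactClient() : undefined;
}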

6 node_modules/@azure/storage-blob/README.md (generated, vendored)

@@ -289,7 +289,7 @@ const blobServiceClient = new BlobServiceClient(

async function main() {
let i = 1;
let containers = blobServiceClient.listContainers();
const containers = blobServiceClient.listContainers();
for await (const container of containers) {
console.log(`Container ${i++}: ${container.name}`);
}

@@ -314,7 +314,7 @@ const blobServiceClient = new BlobServiceClient(

async function main() {
let i = 1;
let iter = blobServiceClient.listContainers();
const iter = blobServiceClient.listContainers();
let containerItem = await iter.next();
while (!containerItem.done) {
console.log(`Container ${i++}: ${containerItem.value.name}`);

@@ -406,7 +406,7 @@ async function main() {
const containerClient = blobServiceClient.getContainerClient(containerName);

let i = 1;
let blobs = containerClient.listBlobsFlat();
const blobs = containerClient.listBlobsFlat();
for await (const blob of blobs) {
console.log(`Blob ${i++}: ${blob.name}`);
}
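The README hunks above only swap `let` for `const` in the listing samples. A self-contained version of the same pattern, assuming a connection string is available in an AZURE_STORAGE_CONNECTION_STRING environment variable (an assumption, not something this diff sets up):

import { BlobServiceClient } from "@azure/storage-blob";

async function main() {
  const blobServiceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING || ""
  );
  let i = 1;
  // listContainers() returns a paged async iterator; for-await walks every page.
  for await (const container of blobServiceClient.listContainers()) {
    console.log(`Container ${i++}: ${container.name}`);
  }
}

main().catch(console.error);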

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js (generated, vendored)

@@ -1,4 +1,4 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
export {};
//# sourceMappingURL=BatchResponse.js.map

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"BatchResponse.js","sourceRoot":"","sources":["../../../src/BatchResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { HttpHeaders } from \"@azure/core-http\";\n\n/**\n * The response data associated with a single request within a batch operation.\n */\nexport interface BatchSubResponse {\n /**\n * The status code of the sub operation.\n */\n status: number;\n\n /**\n * The status message of the sub operation.\n */\n statusMessage: string;\n\n /**\n * The error code of the sub operation, if the sub operation failed.\n */\n errorCode?: string;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeaders;\n\n /**\n * The body as text.\n */\n bodyAsText?: string;\n\n /**\n * The batch sub request corresponding to the sub response.\n */\n _request: BatchSubRequest;\n}\n\n/**\n * The multipart/mixed response which contains the response for each subrequest.\n */\nexport interface ParsedBatchResponse {\n /**\n * The parsed sub responses.\n */\n subResponses: BatchSubResponse[];\n\n /**\n * The succeeded executed sub responses' count;\n */\n subResponsesSucceededCount: number;\n\n /**\n * The failed executed sub responses' count;\n */\n subResponsesFailedCount: number;\n}\n"]}
{"version":3,"file":"BatchResponse.js","sourceRoot":"","sources":["../../../src/BatchResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { HttpHeadersLike } from \"@azure/core-http-compat\";\n\n/**\n * The response data associated with a single request within a batch operation.\n */\nexport interface BatchSubResponse {\n /**\n * The status code of the sub operation.\n */\n status: number;\n\n /**\n * The status message of the sub operation.\n */\n statusMessage: string;\n\n /**\n * The error code of the sub operation, if the sub operation failed.\n */\n errorCode?: string;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeadersLike;\n\n /**\n * The body as text.\n */\n bodyAsText?: string;\n\n /**\n * The batch sub request corresponding to the sub response.\n */\n _request: BatchSubRequest;\n}\n\n/**\n * The multipart/mixed response which contains the response for each subrequest.\n */\nexport interface ParsedBatchResponse {\n /**\n * The parsed sub responses.\n */\n subResponses: BatchSubResponse[];\n\n /**\n * The succeeded executed sub responses' count;\n */\n subResponsesSucceededCount: number;\n\n /**\n * The failed executed sub responses' count;\n */\n subResponsesFailedCount: number;\n}\n"]}

7 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js (generated, vendored)

@@ -1,6 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { HttpHeaders } from "@azure/core-http";
// Licensed under the MIT License.
import { createHttpHeaders } from "@azure/core-rest-pipeline";
import { toHttpHeadersLike } from "@azure/core-http-compat";
import { HTTP_VERSION_1_1, HTTP_LINE_ENDING, HeaderConstants, HTTPURLConnection, } from "./utils/constants";
import { getBodyAsText } from "./BatchUtils";
import { logger } from "./log";

@@ -53,7 +54,7 @@ export class BatchResponseParser {
for (let index = 0; index < subResponseCount; index++) {
const subResponse = subResponses[index];
const deserializedSubResponse = {};
deserializedSubResponse.headers = new HttpHeaders();
deserializedSubResponse.headers = toHttpHeadersLike(createHttpHeaders());
const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);
let subRespHeaderStartFound = false;
let subRespHeaderEndFound = false;
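The parser hunks above replace core-http's `new HttpHeaders()` with `toHttpHeadersLike(createHttpHeaders())`, bridging the new pipeline's header type back to the legacy interface the rest of the parser still expects. A small sketch of that bridge, assuming both `@azure/core-rest-pipeline` and `@azure/core-http-compat` are installed; the header names are arbitrary examples:

import { createHttpHeaders } from "@azure/core-rest-pipeline";
import { toHttpHeadersLike } from "@azure/core-http-compat";

// Build a v2 header bag, then wrap it so callers written against the old
// core-http HttpHeaders surface (get/set/contains) keep working unchanged.
const headers = toHttpHeadersLike(createHttpHeaders({ "content-type": "application/json" }));
headers.set("x-example", "1");
console.log(headers.get("content-type"), headers.contains("x-example"));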
File diff suppressed because one or more lines are too long

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js (generated, vendored)

@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { blobToString } from "./utils/utils.browser";
export async function getBodyAsText(batchResponse) {
const blob = (await batchResponse.blobBody);

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"BatchUtils.browser.js","sourceRoot":"","sources":["../../../src/BatchUtils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,MAAM,IAAI,GAAG,CAAC,MAAM,aAAa,CAAC,QAAQ,CAAS,CAAC;IACpD,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { blobToString } from \"./utils/utils.browser\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise<string> {\n const blob = (await batchResponse.blobBody) as Blob;\n return blobToString(blob);\n}\n\nexport function utf8ByteLength(str: string): number {\n return new Blob([str]).size;\n}\n"]}
{"version":3,"file":"BatchUtils.browser.js","sourceRoot":"","sources":["../../../src/BatchUtils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,MAAM,IAAI,GAAG,CAAC,MAAM,aAAa,CAAC,QAAQ,CAAS,CAAC;IACpD,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { blobToString } from \"./utils/utils.browser\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel,\n): Promise<string> {\n const blob = (await batchResponse.blobBody) as Blob;\n return blobToString(blob);\n}\n\nexport function utf8ByteLength(str: string): number {\n return new Blob([str]).size;\n}\n"]}

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js (generated, vendored)

@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { streamToBuffer2 } from "./utils/utils.node";
import { BATCH_MAX_PAYLOAD_IN_BYTES } from "./utils/constants";
export async function getBodyAsText(batchResponse) {

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"BatchUtils.js","sourceRoot":"","sources":["../../../src/BatchUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;IAEF,6CAA6C;IAC7C,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;IAEzC,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise<string> {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n"]}
{"version":3,"file":"BatchUtils.js","sourceRoot":"","sources":["../../../src/BatchUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;IAEF,6CAA6C;IAC7C,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;IAEzC,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel,\n): Promise<string> {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer,\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n"]}

169 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js (generated, vendored)

@@ -1,15 +1,20 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { BaseRequestPolicy, deserializationPolicy, generateUuid, HttpHeaders, WebResource, isTokenCredential, bearerTokenAuthenticationPolicy, isNode, } from "@azure/core-http";
import { SpanStatusCode } from "@azure/core-tracing";
// Licensed under the MIT License.
import { randomUUID } from "@azure/core-util";
import { isTokenCredential } from "@azure/core-auth";
import { bearerTokenAuthenticationPolicy, createEmptyPipeline, createHttpHeaders, } from "@azure/core-rest-pipeline";
import { isNode } from "@azure/core-util";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { BlobClient } from "./Clients";
import { Mutex } from "./utils/Mutex";
import { Pipeline } from "./Pipeline";
import { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common";
import { getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common";
import { stringifyXML } from "@azure/core-xml";
import { HeaderConstants, BATCH_MAX_REQUEST, HTTP_VERSION_1_1, HTTP_LINE_ENDING, StorageOAuthScopes, } from "./utils/constants";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { createSpan } from "./utils/tracing";
import { tracingClient } from "./utils/tracing";
import { authorizeRequestOnTenantChallenge, serializationPolicy } from "@azure/core-client";
import { storageSharedKeyCredentialPolicy } from "./policies/StorageSharedKeyCredentialPolicyV2";
/**
* A BlobBatch represents an aggregated set of operations on blobs.
* Currently, only `delete` and `setAccessTier` are supported.

@@ -81,8 +86,7 @@ export class BlobBatch {
if (!options) {
options = {};
}
const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options);
try {
return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => {
this.setBatchType("delete");
await this.addSubRequestInternal({
url: url,

@@ -90,17 +94,7 @@ export class BlobBatch {
}, async () => {
await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions);
});
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
});
}
async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) {
let url;

@@ -128,8 +122,7 @@ export class BlobBatch {
if (!options) {
options = {};
}
const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options);
try {
return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => {
this.setBatchType("setAccessTier");
await this.addSubRequestInternal({
url: url,

@@ -137,17 +130,7 @@ export class BlobBatch {
}, async () => {
await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions);
});
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
});
}
}
/**

@@ -158,7 +141,7 @@ class InnerBatchRequest {
constructor() {
this.operationCount = 0;
this.body = "";
const tempGuid = generateUuid();
const tempGuid = randomUUID();
// batch_{batchid}
this.boundary = `batch_${tempGuid}`;
// --batch_{batchid}

@@ -179,29 +162,48 @@ class InnerBatchRequest {
* @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
*/
createPipeline(credential) {
const isAnonymousCreds = credential instanceof AnonymousCredential;
const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]
const factories = new Array(policyFactoryLength);
factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer
factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers
if (!isAnonymousCreds) {
factories[2] = isTokenCredential(credential)
? attachCredential(bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential)
: credential;
const corePipeline = createEmptyPipeline();
corePipeline.addPolicy(serializationPolicy({
stringifyXML,
serializerOptions: {
xml: {
xmlCharKey: "#",
},
},
}), { phase: "Serialize" });
// Use batch header filter policy to exclude unnecessary headers
corePipeline.addPolicy(batchHeaderFilterPolicy());
// Use batch assemble policy to assemble request and intercept request from going to wire
corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" });
if (isTokenCredential(credential)) {
corePipeline.addPolicy(bearerTokenAuthenticationPolicy({
credential,
scopes: StorageOAuthScopes,
challengeCallbacks: { authorizeRequestOnChallenge: authorizeRequestOnTenantChallenge },
}), { phase: "Sign" });
}
factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire
return new Pipeline(factories, {});
else if (credential instanceof StorageSharedKeyCredential) {
corePipeline.addPolicy(storageSharedKeyCredentialPolicy({
accountName: credential.accountName,
accountKey: credential.accountKey,
}), { phase: "Sign" });
}
const pipeline = new Pipeline([]);
// attach the v2 pipeline to this one
pipeline._credential = credential;
pipeline._corePipeline = corePipeline;
return pipeline;
}
appendSubRequestToBody(request) {
// Start to assemble sub request
this.body += [
this.subRequestPrefix,
`${HeaderConstants.CONTENT_ID}: ${this.operationCount}`,
"",
this.subRequestPrefix, // sub request constant prefix
`${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID
"", // empty line after sub request's content ID
`${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method
].join(HTTP_LINE_ENDING);
for (const header of request.headers.headersArray()) {
this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;
for (const [name, value] of request.headers) {
this.body += `${name}: ${value}${HTTP_LINE_ENDING}`;
}
this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line
// No body to assemble for current batch request support

@@ -232,51 +234,34 @@ class InnerBatchRequest {
return this.subRequests;
}
}
class BatchRequestAssemblePolicy extends BaseRequestPolicy {
constructor(batchRequest, nextPolicy, options) {
super(nextPolicy, options);
this.dummyResponse = {
request: new WebResource(),
status: 200,
headers: new HttpHeaders(),
};
this.batchRequest = batchRequest;
}
async sendRequest(request) {
await this.batchRequest.appendSubRequestToBody(request);
return this.dummyResponse; // Intercept request from going to wire
}
function batchRequestAssemblePolicy(batchRequest) {
return {
name: "batchRequestAssemblePolicy",
async sendRequest(request) {
batchRequest.appendSubRequestToBody(request);
return {
request,
status: 200,
headers: createHttpHeaders(),
};
},
};
}
class BatchRequestAssemblePolicyFactory {
constructor(batchRequest) {
this.batchRequest = batchRequest;
}
create(nextPolicy, options) {
return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);
}
}
class BatchHeaderFilterPolicy extends BaseRequestPolicy {
// The base class has a protected constructor. Adding a public one to enable constructing of this class.
/* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/
constructor(nextPolicy, options) {
super(nextPolicy, options);
}
async sendRequest(request) {
let xMsHeaderName = "";
for (const header of request.headers.headersArray()) {
if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {
xMsHeaderName = header.name;
function batchHeaderFilterPolicy() {
return {
name: "batchHeaderFilterPolicy",
async sendRequest(request, next) {
let xMsHeaderName = "";
for (const [name] of request.headers) {
if (iEqual(name, HeaderConstants.X_MS_VERSION)) {
xMsHeaderName = name;
}
}
}
if (xMsHeaderName !== "") {
request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.
}
return this._nextPolicy.sendRequest(request);
}
}
class BatchHeaderFilterPolicyFactory {
create(nextPolicy, options) {
return new BatchHeaderFilterPolicy(nextPolicy, options);
}
if (xMsHeaderName !== "") {
request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header.
}
return next(request);
},
};
}
//# sourceMappingURL=BlobBatch.js.map
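The BlobBatch.js hunks above drop the core-http BaseRequestPolicy subclasses and their factory wrappers in favor of plain policy objects added to a core-rest-pipeline pipeline. A minimal sketch of that shape, using an illustrative logging policy rather than the SDK's batch policies:

import { createEmptyPipeline, PipelinePolicy } from "@azure/core-rest-pipeline";

// A v2 policy is just a named object with sendRequest(request, next);
// no base class, factory, or _nextPolicy plumbing is needed.
const logUrlPolicy: PipelinePolicy = {
  name: "logUrlPolicy",
  async sendRequest(request, next) {
    console.log(`outgoing request: ${request.url}`);
    return next(request);
  },
};

const pipeline = createEmptyPipeline();
pipeline.addPolicy(logUrlPolicy, { phase: "Serialize" });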

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map (generated, vendored)
File diff suppressed because one or more lines are too long

35 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js (generated, vendored)

@@ -1,15 +1,13 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { BatchResponseParser } from "./BatchResponseParser";
import { utf8ByteLength } from "./BatchUtils";
import { BlobBatch } from "./BlobBatch";
import { SpanStatusCode } from "@azure/core-tracing";
import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
import { Service, Container } from "./generated/src/operations";
import { tracingClient } from "./utils/tracing";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { StorageClientContext } from "./generated/src/storageClientContext";
import { newPipeline, isPipelineLike } from "./Pipeline";
import { getURLPath } from "./utils/utils.common";
import { StorageContextClient } from "./StorageContextClient";
import { newPipeline, isPipelineLike, getCoreClientOptions, } from "./Pipeline";
import { assertResponse, getURLPath } from "./utils/utils.common";
/**
* A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.
*

@@ -31,14 +29,14 @@ export class BlobBatchClient {
else {
pipeline = newPipeline(credentialOrPipeline, options);
}
const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());
const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline));
const path = getURLPath(url);
if (path && path !== "/") {
// Container scoped.
this.serviceOrContainerContext = new Container(storageClientContext);
this.serviceOrContainerContext = storageClientContext.container;
}
else {
this.serviceOrContainerContext = new Service(storageClientContext);
this.serviceOrContainerContext = storageClientContext.service;
}
}
/**

@@ -117,11 +115,10 @@ export class BlobBatchClient {
if (!batchRequest || batchRequest.getSubRequests().size === 0) {
throw new RangeError("Batch request should contain one or more sub requests.");
}
const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options);
try {
return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => {
const batchRequestBody = batchRequest.getHttpRequestBody();
// ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.
const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)));
const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions)));
// Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).
const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());
const responseSummary = await batchResponseParser.parseBatchResponse();

@@ -137,17 +134,7 @@ export class BlobBatchClient {
subResponsesFailedCount: responseSummary.subResponsesFailedCount,
};
return res;
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
});
}
}
//# sourceMappingURL=BlobBatchClient.js.map
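The BlobBatchClient hunks above swap the hand-rolled createSpan / try / catch / finally blocks for `tracingClient.withSpan(...)`, which records the error status and ends the span on the caller's behalf. A rough sketch of the pattern, assuming a tracing client built with `createTracingClient`; the namespace, package name, and helper name are illustrative, and the SDK constructs its own client in ./utils/tracing:

import { createTracingClient } from "@azure/core-tracing";

const tracingClient = createTracingClient({
  namespace: "Microsoft.Storage",
  packageName: "@azure/storage-blob",
});

async function submitWithTracing<T>(work: (options: object) => Promise<T>): Promise<T> {
  // withSpan wraps the callback: on throw it marks the span as errored, and it
  // always ends the span, replacing the removed boilerplate.
  return tracingClient.withSpan("BlobBatchClient-submitBatch", {}, async (updatedOptions) => {
    return work(updatedOptions);
  });
}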

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map (generated, vendored)
File diff suppressed because one or more lines are too long

node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js (generated, vendored)

@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
// This file is used as a shim of "BlobDownloadResponse" for some browser bundlers
// when trying to bundle "BlobDownloadResponse"
// "BlobDownloadResponse" class is only available in Node.js runtime

node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map (generated, vendored)

@@ -1 +1 @@
{"version":3,"file":"BlobDownloadResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,kFAAkF;AAClF,+CAA+C;AAC/C,oEAAoE;AACpE,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BlobDownloadResponse\" for some browser bundlers\n// when trying to bundle \"BlobDownloadResponse\"\n// \"BlobDownloadResponse\" class is only available in Node.js runtime\nexport const BlobDownloadResponse = 1;\n"]}
{"version":3,"file":"BlobDownloadResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,kFAAkF;AAClF,+CAA+C;AAC/C,oEAAoE;AACpE,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\n// This file is used as a shim of \"BlobDownloadResponse\" for some browser bundlers\n// when trying to bundle \"BlobDownloadResponse\"\n// \"BlobDownloadResponse\" class is only available in Node.js runtime\nexport const BlobDownloadResponse = 1;\n"]}

30 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js (generated, vendored)

@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { isNode } from "@azure/core-http";
// Licensed under the MIT License.
import { isNode } from "@azure/core-util";
import { RetriableReadableStream, } from "./utils/RetriableReadableStream";
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.

@@ -13,19 +13,6 @@ import { RetriableReadableStream, } from "./utils/RetriableReadableStream";
* Readable stream.
*/
export class BlobDownloadResponse {
/**
* Creates an instance of BlobDownloadResponse.
*
* @param originalResponse -
* @param getter -
* @param offset -
* @param count -
* @param options -
*/
constructor(originalResponse, getter, offset, count, options = {}) {
this.originalResponse = originalResponse;
this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);
}
/**
* Indicates that the service supports
* requests for partial file content.

@@ -459,5 +446,18 @@ export class BlobDownloadResponse {
get _response() {
return this.originalResponse._response;
}
/**
* Creates an instance of BlobDownloadResponse.
*
* @param originalResponse -
* @param getter -
* @param offset -
* @param count -
* @param options -
*/
constructor(originalResponse, getter, offset, count, options = {}) {
this.originalResponse = originalResponse;
this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);
}
}
//# sourceMappingURL=BlobDownloadResponse.js.map
File diff suppressed because one or more lines are too long

175 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js (generated, vendored)

@@ -1,36 +1,13 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { generateUuid } from "@azure/core-http";
import { StorageClientContext } from "./generated/src/index";
import { SpanStatusCode } from "@azure/core-tracing";
import { Blob as StorageBlob, Container } from "./generated/src/operations";
// Licensed under the MIT License.
import { randomUUID } from "@azure/core-util";
import { ETagNone } from "./utils/constants";
import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
import { tracingClient } from "./utils/tracing";
import { assertResponse } from "./utils/utils.common";
/**
* A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.
*/
export class BlobLeaseClient {
/**
* Creates an instance of BlobLeaseClient.
* @param client - The client to make the lease operation requests.
* @param leaseId - Initial proposed lease id.
*/
constructor(client, leaseId) {
const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions());
this._url = client.url;
if (client.name === undefined) {
this._isContainer = true;
this._containerOrBlobOperation = new Container(clientContext);
}
else {
this._isContainer = false;
this._containerOrBlobOperation = new StorageBlob(clientContext);
}
if (!leaseId) {
leaseId = generateUuid();
}
this._leaseId = leaseId;
}
/**
* Gets the lease Id.
*

@@ -47,6 +24,27 @@ export class BlobLeaseClient {
get url() {
return this._url;
}
/**
* Creates an instance of BlobLeaseClient.
* @param client - The client to make the lease operation requests.
* @param leaseId - Initial proposed lease id.
*/
constructor(client, leaseId) {
const clientContext = client.storageClientContext;
this._url = client.url;
if (client.name === undefined) {
this._isContainer = true;
this._containerOrBlobOperation = clientContext.container;
}
else {
this._isContainer = false;
this._containerOrBlobOperation = clientContext.blob;
}
if (!leaseId) {
leaseId = randomUUID();
}
this._leaseId = leaseId;
}
/**
* Establishes and manages a lock on a container for delete operations, or on a blob
* for write and delete operations.

@@ -60,27 +58,23 @@ export class BlobLeaseClient {
* @returns Response data for acquire lease operation.
*/
async acquireLease(duration, options = {}) {
var _a, _b, _c, _d, _e, _f;
const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options);
var _a, _b, _c, _d, _e;
if (this._isContainer &&
((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
(((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
try {
return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions)));
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => {
var _a;
return assertResponse(await this._containerOrBlobOperation.acquireLease({
abortSignal: options.abortSignal,
duration,
modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
proposedLeaseId: this._leaseId,
tracingOptions: updatedOptions.tracingOptions,
}));
});
}
/**
* To change the ID of the lease.

@@ -93,29 +87,23 @@ export class BlobLeaseClient {
* @returns Response data for change lease operation.
*/
async changeLease(proposedLeaseId, options = {}) {
var _a, _b, _c, _d, _e, _f;
const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options);
var _a, _b, _c, _d, _e;
if (this._isContainer &&
((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
(((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
try {
const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => {
var _a;
const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, {
abortSignal: options.abortSignal,
modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
tracingOptions: updatedOptions.tracingOptions,
}));
this._leaseId = proposedLeaseId;
return response;
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
});
}
/**
* To free the lease if it is no longer needed so that another client may

@@ -128,27 +116,21 @@ export class BlobLeaseClient {
* @returns Response data for release lease operation.
*/
async releaseLease(options = {}) {
var _a, _b, _c, _d, _e, _f;
const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options);
var _a, _b, _c, _d, _e;
if (this._isContainer &&
((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
(((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
try {
return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => {
var _a;
return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, {
abortSignal: options.abortSignal,
modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
tracingOptions: updatedOptions.tracingOptions,
}));
});
}
/**
* To renew the lease.

@@ -160,27 +142,21 @@ export class BlobLeaseClient {
* @returns Response data for renew lease operation.
*/
async renewLease(options = {}) {
var _a, _b, _c, _d, _e, _f;
const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options);
var _a, _b, _c, _d, _e;
if (this._isContainer &&
((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
(((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
try {
return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => {
var _a;
return this._containerOrBlobOperation.renewLease(this._leaseId, {
abortSignal: options.abortSignal,
modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
tracingOptions: updatedOptions.tracingOptions,
});
throw e;
}
finally {
span.end();
}
});
}
/**
* To end the lease but ensure that another client cannot acquire a new lease

@@ -194,28 +170,23 @@ export class BlobLeaseClient {
* @returns Response data for break lease operation.
*/
async breakLease(breakPeriod, options = {}) {
var _a, _b, _c, _d, _e, _f;
const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options);
var _a, _b, _c, _d, _e;
if (this._isContainer &&
((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
(((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
try {
const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions));
return await this._containerOrBlobOperation.breakLease(operationOptions);
}
catch (e) {
span.setStatus({
code: SpanStatusCode.ERROR,
message: e.message,
});
throw e;
}
finally {
span.end();
}
return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => {
var _a;
const operationOptions = {
abortSignal: options.abortSignal,
breakPeriod,
modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
tracingOptions: updatedOptions.tracingOptions,
};
return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions));
});
}
}
//# sourceMappingURL=BlobLeaseClient.js.map
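BlobLeaseClient now draws its default lease ID from `randomUUID` in `@azure/core-util` instead of core-http's `generateUuid`, as the first two hunks above show. A minimal sketch of the replacement call:

import { randomUUID } from "@azure/core-util";

// When the caller does not propose a lease ID, the client generates one.
const proposedLeaseId: string = randomUUID();
console.log(`proposed lease id: ${proposedLeaseId}`);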

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map (generated, vendored)
File diff suppressed because one or more lines are too long

20 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js (generated, vendored)

@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
/**
* ONLY AVAILABLE IN BROWSER RUNTIME.
*

@@ -7,15 +7,6 @@
* parse avor data returned by blob query.
*/
export class BlobQueryResponse {
/**
* Creates an instance of BlobQueryResponse.
*
* @param originalResponse -
* @param options -
*/
constructor(originalResponse, _options = {}) {
this.originalResponse = originalResponse;
}
/**
* Indicates that the service supports
* requests for partial file content.

@@ -358,5 +349,14 @@ export class BlobQueryResponse {
get _response() {
return this.originalResponse._response;
}
/**
* Creates an instance of BlobQueryResponse.
*
* @param originalResponse -
* @param options -
*/
constructor(originalResponse, _options = {}) {
this.originalResponse = originalResponse;
}
}
//# sourceMappingURL=BlobQueryResponse.browser.js.map
File diff suppressed because one or more lines are too long

24 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js (generated, vendored)

@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { isNode } from "@azure/core-http";
// Licensed under the MIT License.
import { isNode } from "@azure/core-util";
import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream";
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.

@@ -9,16 +9,6 @@ import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream";
* parse avor data returned by blob query.
*/
export class BlobQueryResponse {
/**
* Creates an instance of BlobQueryResponse.
*
* @param originalResponse -
* @param options -
*/
constructor(originalResponse, options = {}) {
this.originalResponse = originalResponse;
this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);
}
/**
* Indicates that the service supports
* requests for partial file content.

@@ -363,5 +353,15 @@ export class BlobQueryResponse {
get _response() {
return this.originalResponse._response;
}
/**
* Creates an instance of BlobQueryResponse.
*
* @param originalResponse -
* @param options -
*/
constructor(originalResponse, options = {}) {
this.originalResponse = originalResponse;
this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);
}
}
//# sourceMappingURL=BlobQueryResponse.js.map

2 node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map (generated, vendored)
File diff suppressed because one or more lines are too long
336
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js
generated
vendored
336
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js
generated
vendored
|
|
@ -1,47 +1,26 @@
|
|||
import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib";
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
// Licensed under the MIT license.
|
||||
import { isTokenCredential, isNode, getDefaultProxySettings, } from "@azure/core-http";
|
||||
import { SpanStatusCode } from "@azure/core-tracing";
|
||||
import { Container, Service } from "./generated/src/operations";
|
||||
// Licensed under the MIT License.
|
||||
import { isTokenCredential } from "@azure/core-auth";
|
||||
import { getDefaultProxySettings } from "@azure/core-rest-pipeline";
|
||||
import { isNode } from "@azure/core-util";
|
||||
import { newPipeline, isPipelineLike } from "./Pipeline";
|
||||
import { ContainerClient, } from "./ContainerClient";
|
||||
import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, toTags, } from "./utils/utils.common";
|
||||
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
|
||||
import { AnonymousCredential } from "./credentials/AnonymousCredential";
|
||||
import "@azure/core-paging";
|
||||
import { truncatedISO8061Date } from "./utils/utils.common";
|
||||
import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
|
||||
import { truncatedISO8061Date, assertResponse } from "./utils/utils.common";
|
||||
import { tracingClient } from "./utils/tracing";
|
||||
import { BlobBatchClient } from "./BlobBatchClient";
|
||||
import { StorageClient } from "./StorageClient";
|
||||
import { AccountSASPermissions } from "./sas/AccountSASPermissions";
|
||||
import { generateAccountSASQueryParameters } from "./sas/AccountSASSignatureValues";
|
||||
import { generateAccountSASQueryParameters, generateAccountSASQueryParametersInternal, } from "./sas/AccountSASSignatureValues";
|
||||
import { AccountSASServices } from "./sas/AccountSASServices";
|
||||
/**
|
||||
* A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you
|
||||
* to manipulate blob containers.
|
||||
*/
|
||||
export class BlobServiceClient extends StorageClient {
|
||||
constructor(url, credentialOrPipeline,
|
||||
// Legacy, no fix for eslint error without breaking. Disable it for this interface.
|
||||
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
|
||||
options) {
|
||||
let pipeline;
|
||||
if (isPipelineLike(credentialOrPipeline)) {
|
||||
pipeline = credentialOrPipeline;
|
||||
}
|
||||
else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||
|
||||
credentialOrPipeline instanceof AnonymousCredential ||
|
||||
isTokenCredential(credentialOrPipeline)) {
|
||||
pipeline = newPipeline(credentialOrPipeline, options);
|
||||
}
|
||||
else {
|
||||
// The second parameter is undefined. Use anonymous credential
|
||||
pipeline = newPipeline(new AnonymousCredential(), options);
|
||||
}
|
||||
super(url, pipeline);
|
||||
this.serviceContext = new Service(this.storageClientContext);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Creates an instance of BlobServiceClient from connection string.
|
||||
|
|
@ -81,6 +60,26 @@ export class BlobServiceClient extends StorageClient {
|
|||
throw new Error("Connection string must be either an Account connection string or a SAS connection string");
|
||||
}
|
||||
}
|
||||
constructor(url, credentialOrPipeline,
|
||||
// Legacy, no fix for eslint error without breaking. Disable it for this interface.
|
||||
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
|
||||
options) {
|
||||
let pipeline;
|
||||
if (isPipelineLike(credentialOrPipeline)) {
|
||||
pipeline = credentialOrPipeline;
|
||||
}
|
||||
else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||
|
||||
credentialOrPipeline instanceof AnonymousCredential ||
|
||||
isTokenCredential(credentialOrPipeline)) {
|
||||
pipeline = newPipeline(credentialOrPipeline, options);
|
||||
}
|
||||
else {
|
||||
// The second parameter is undefined. Use anonymous credential
|
||||
pipeline = newPipeline(new AnonymousCredential(), options);
|
||||
}
|
||||
super(url, pipeline);
|
||||
this.serviceContext = this.storageClientContext.service;
|
||||
}
|
||||
/**
|
||||
* Creates a {@link ContainerClient} object
|
||||
*
|
||||
|
|
@ -104,25 +103,14 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Container creation response and the corresponding container client.
|
||||
*/
|
||||
async createContainer(containerName, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options);
|
||||
try {
|
||||
return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => {
|
||||
const containerClient = this.getContainerClient(containerName);
|
||||
const containerCreateResponse = await containerClient.create(updatedOptions);
|
||||
return {
|
||||
containerClient,
|
||||
containerCreateResponse,
|
||||
};
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deletes a Blob container.
|
||||
|
|
@ -132,21 +120,10 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Container deletion response.
|
||||
*/
|
||||
async deleteContainer(containerName, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options);
|
||||
try {
|
||||
return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => {
|
||||
const containerClient = this.getContainerClient(containerName);
|
||||
return await containerClient.delete(updatedOptions);
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return containerClient.delete(updatedOptions);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Restore a previously deleted Blob container.
|
||||
|
|
@ -158,25 +135,17 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Container deletion response.
|
||||
*/
|
||||
async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options);
|
||||
try {
|
||||
return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => {
|
||||
const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName);
|
||||
// Hack to access a protected member.
|
||||
const containerContext = new Container(containerClient["storageClientContext"]);
|
||||
const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName,
|
||||
deletedContainerVersion }, updatedOptions));
|
||||
const containerContext = containerClient["storageClientContext"].container;
|
||||
const containerUndeleteResponse = assertResponse(await containerContext.restore({
|
||||
deletedContainerName,
|
||||
deletedContainerVersion,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
return { containerClient, containerUndeleteResponse };
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
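The `undeleteContainer` rewrite above only changes how the generated container context is reached; the public surface stays the same. A small usage sketch, with the account URL, container name, and version as placeholders (deleted-container versions normally come from `listContainers({ includeDeleted: true })` items):

```javascript
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const serviceClient = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net",
  new DefaultAzureCredential()
);

// Restores a soft-deleted container and returns a client for it.
async function restoreContainer(deletedName, deletedVersion) {
  const { containerClient } = await serviceClient.undeleteContainer(deletedName, deletedVersion);
  return containerClient;
}
```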
|
||||
/**
|
||||
* Rename an existing Blob Container.
|
||||
|
|
@ -188,25 +157,14 @@ export class BlobServiceClient extends StorageClient {
|
|||
/* eslint-disable-next-line @typescript-eslint/ban-ts-comment */
|
||||
// @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.
|
||||
async renameContainer(sourceContainerName, destinationContainerName, options = {}) {
|
||||
var _a;
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options);
|
||||
try {
|
||||
return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => {
|
||||
var _a;
|
||||
const containerClient = this.getContainerClient(destinationContainerName);
|
||||
// Hack to access a protected member.
|
||||
const containerContext = new Container(containerClient["storageClientContext"]);
|
||||
const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }));
|
||||
const containerContext = containerClient["storageClientContext"].container;
|
||||
const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })));
|
||||
return { containerClient, containerRenameResponse };
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Gets the properties of a storage account’s Blob service, including properties
|
||||
|
|
@ -217,20 +175,12 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Response data for the Service Get Properties operation.
|
||||
*/
|
||||
async getProperties(options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options);
|
||||
try {
|
||||
return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.serviceContext.getProperties({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Sets properties for a storage account’s Blob service endpoint, including properties
|
||||
|
|
@ -242,20 +192,12 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Response data for the Service Set Properties operation.
|
||||
*/
|
||||
async setProperties(properties, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options);
|
||||
try {
|
||||
return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.serviceContext.setProperties(properties, {
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Retrieves statistics related to replication for the Blob service. It is only
|
||||
|
|
@ -267,20 +209,12 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Response data for the Service Get Statistics operation.
|
||||
*/
|
||||
async getStatistics(options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options);
|
||||
try {
|
||||
return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.serviceContext.getStatistics({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* The Get Account Information operation returns the sku name and account kind
|
||||
|
|
@ -293,20 +227,12 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Response data for the Service Get Account Info operation.
|
||||
*/
|
||||
async getAccountInfo(options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options);
|
||||
try {
|
||||
return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.serviceContext.getAccountInfo({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns a list of the containers under the specified account.
|
||||
|
|
@ -323,20 +249,9 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @returns Response data for the Service List Container Segment operation.
|
||||
*/
|
||||
async listContainersSegment(marker, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options);
|
||||
try {
|
||||
return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions })));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* The Filter Blobs operation enables callers to list blobs across all containers whose tags
|
||||
|
|
@ -357,9 +272,14 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @param options - Options to find blobs by tags.
|
||||
*/
|
||||
async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options);
|
||||
try {
|
||||
const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
|
||||
const response = assertResponse(await this.serviceContext.filterBlobs({
|
||||
abortSignal: options.abortSignal,
|
||||
where: tagFilterSqlExpression,
|
||||
marker,
|
||||
maxPageSize: options.maxPageSize,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => {
|
||||
var _a;
|
||||
let tagValue = "";
|
||||
|
|
@ -369,17 +289,7 @@ export class BlobServiceClient extends StorageClient {
|
|||
return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue });
|
||||
}) });
|
||||
return wrappedResponse;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.
|
||||
|
|
@ -397,8 +307,8 @@ export class BlobServiceClient extends StorageClient {
|
|||
* items. The marker value is opaque to the client.
|
||||
* @param options - Options to find blobs by tags.
|
||||
*/
|
||||
findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() {
|
||||
findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) {
|
||||
let response;
|
||||
if (!!marker || marker === undefined) {
|
||||
do {
|
||||
|
|
@ -419,20 +329,22 @@ export class BlobServiceClient extends StorageClient {
|
|||
* however, only a subset of the OData filter syntax is supported in the Blob service.
|
||||
* @param options - Options to findBlobsByTagsItems.
|
||||
*/
|
||||
findBlobsByTagsItems(tagFilterSqlExpression, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() {
|
||||
var e_1, _a;
|
||||
findBlobsByTagsItems(tagFilterSqlExpression_1) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) {
|
||||
var _a, e_1, _b, _c;
|
||||
let marker;
|
||||
try {
|
||||
for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
|
||||
const segment = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const segment = _c;
|
||||
yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs)));
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
|
||||
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
|
|
@ -554,8 +466,8 @@ export class BlobServiceClient extends StorageClient {
|
|||
* items. The marker value is opaque to the client.
|
||||
* @param options - Options to list containers operation.
|
||||
*/
|
||||
listSegments(marker, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listSegments_1() {
|
||||
listSegments(marker_1) {
|
||||
return __asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) {
|
||||
let listContainersSegmentResponse;
|
||||
if (!!marker || marker === undefined) {
|
||||
do {
|
||||
|
|
@ -573,20 +485,22 @@ export class BlobServiceClient extends StorageClient {
|
|||
*
|
||||
* @param options - Options to list containers operation.
|
||||
*/
|
||||
listItems(options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listItems_1() {
|
||||
var e_2, _a;
|
||||
listItems() {
|
||||
return __asyncGenerator(this, arguments, function* listItems_1(options = {}) {
|
||||
var _a, e_2, _b, _c;
|
||||
let marker;
|
||||
try {
|
||||
for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
|
||||
const segment = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(this.listSegments(marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const segment = _c;
|
||||
yield __await(yield* __asyncDelegator(__asyncValues(segment.containerItems)));
|
||||
}
|
||||
}
|
||||
catch (e_2_1) { e_2 = { error: e_2_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
|
||||
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
||||
}
|
||||
finally { if (e_2) throw e_2.error; }
|
||||
}
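These generator rewrites only change how tslib downlevels the async iterators (the parameters move into the inner generator function); the marker-driven paging they implement is unchanged. A sketch of consuming it through the public `byPage` iterator, with the page size chosen arbitrarily for illustration:

```javascript
// Walks the containers one service page at a time instead of item by item.
async function listContainersByPage(serviceClient) {
  const pages = serviceClient.listContainers().byPage({ maxPageSize: 20 });
  for await (const page of pages) {
    for (const containerItem of page.containerItems) {
      console.log(containerItem.name);
    }
  }
}
```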
|
||||
|
|
@ -716,12 +630,14 @@ export class BlobServiceClient extends StorageClient {
|
|||
* @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time
|
||||
*/
|
||||
async getUserDelegationKey(startsOn, expiresOn, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options);
|
||||
try {
|
||||
const response = await this.serviceContext.getUserDelegationKey({
|
||||
return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => {
|
||||
const response = assertResponse(await this.serviceContext.getUserDelegationKey({
|
||||
startsOn: truncatedISO8061Date(startsOn, false),
|
||||
expiresOn: truncatedISO8061Date(expiresOn, false),
|
||||
}, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}, {
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
const userDelegationKey = {
|
||||
signedObjectId: response.signedObjectId,
|
||||
signedTenantId: response.signedTenantId,
|
||||
|
|
@ -733,17 +649,7 @@ export class BlobServiceClient extends StorageClient {
|
|||
};
|
||||
const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);
|
||||
return res;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Creates a BlobBatchClient object to conduct batch operations.
|
||||
|
|
@@ -782,5 +688,31 @@ export class BlobServiceClient extends StorageClient {
resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString();
return appendToURLQuery(this.url, sas);
}
/**
* Only available for BlobServiceClient constructed with a shared key credential.
*
* Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on
* the client properties and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas
*
* @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided.
* @param permissions - Specifies the list of permissions to be associated with the SAS.
* @param resourceTypes - Specifies the resource types associated with the shared access signature.
* @param options - Optional parameters.
* @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
*/
generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) {
if (!(this.credential instanceof StorageSharedKeyCredential)) {
throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential");
}
if (expiresOn === undefined) {
const now = new Date();
expiresOn = new Date(now.getTime() + 3600 * 1000);
}
return generateAccountSASQueryParametersInternal(Object.assign({ permissions,
expiresOn,
resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign;
}
}
//# sourceMappingURL=BlobServiceClient.js.map
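The new `generateSasStringToSign` above mirrors the account SAS URL generation whose tail appears at the top of the hunk: when no expiry is passed it defaults to one hour from now, then signs with the shared key credential. A hedged sketch of producing an equivalent account SAS with the public helpers (account name, key, and permissions are placeholders):

```javascript
import {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  generateAccountSASQueryParameters,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");

// Same default the client applies: expire one hour from now.
const expiresOn = new Date(Date.now() + 3600 * 1000);

const sas = generateAccountSASQueryParameters(
  {
    permissions: AccountSASPermissions.parse("r"),
    services: AccountSASServices.parse("b").toString(),
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
    expiresOn,
  },
  credential
).toString();

console.log(`https://myaccount.blob.core.windows.net?${sas}`);
```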
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map
generated
vendored
File diff suppressed because one or more lines are too long
1637
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js
generated
vendored
File diff suppressed because it is too large
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map
generated
vendored
File diff suppressed because one or more lines are too long
461
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js
generated
vendored
@@ -1,14 +1,14 @@
import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib";
import { getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http";
import { SpanStatusCode } from "@azure/core-tracing";
import { getDefaultProxySettings, } from "@azure/core-rest-pipeline";
import { isNode } from "@azure/core-util";
import { isTokenCredential } from "@azure/core-auth";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { Container } from "./generated/src/operations";
import { newPipeline, isPipelineLike } from "./Pipeline";
import { StorageClient } from "./StorageClient";
import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing";
import { appendToURLPath, appendToURLQuery, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, EscapePath, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, toTags, truncatedISO8061Date, } from "./utils/utils.common";
import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues";
import { tracingClient } from "./utils/tracing";
import { appendToURLPath, appendToURLQuery, assertResponse, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, EscapePath, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, toTags, truncatedISO8061Date, } from "./utils/utils.common";
import { generateBlobSASQueryParameters, generateBlobSASQueryParametersInternal, } from "./sas/BlobSASSignatureValues";
import { BlobLeaseClient } from "./BlobLeaseClient";
import { AppendBlobClient, BlobClient, BlockBlobClient, PageBlobClient, } from "./Clients";
import { BlobBatchClient } from "./BlobBatchClient";
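The import churn above is the heart of this dependency bump: the monolithic `@azure/core-http` package gives way to the split `@azure/core-rest-pipeline`, `@azure/core-util`, and `@azure/core-auth` packages plus the v2 tracing helper. A small sketch of the credential dispatch those imports enable; the helper function is ours, not part of the SDK:

```javascript
import { isTokenCredential } from "@azure/core-auth";
import { AnonymousCredential, StorageSharedKeyCredential } from "@azure/storage-blob";

// Mirrors the constructor's dispatch: shared key, AAD token, or anonymous.
function describeCredential(credential) {
  if (credential instanceof StorageSharedKeyCredential) {
    return "shared key credential (account name + key)";
  }
  if (isTokenCredential(credential)) {
    return "token credential (e.g. from @azure/identity)";
  }
  if (credential === undefined || credential instanceof AnonymousCredential) {
    return "anonymous access (public resource or SAS in the URL)";
  }
  return "unrecognized credential";
}

console.log(describeCredential(new AnonymousCredential()));
```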
@@ -16,6 +16,12 @@ import { BlobBatchClient } from "./BlobBatchClient";
* A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.
|
||||
*/
|
||||
export class ContainerClient extends StorageClient {
|
||||
/**
|
||||
* The name of the container.
|
||||
*/
|
||||
get containerName() {
|
||||
return this._containerName;
|
||||
}
|
||||
constructor(urlOrConnectionString, credentialOrPipelineOrContainerName,
|
||||
// Legacy, no fix for eslint error without breaking. Disable it for this interface.
|
||||
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
|
||||
|
|
@ -76,13 +82,7 @@ export class ContainerClient extends StorageClient {
|
|||
}
|
||||
super(url, pipeline);
|
||||
this._containerName = this.getContainerNameFromUrl();
|
||||
this.containerContext = new Container(this.storageClientContext);
|
||||
}
|
||||
/**
|
||||
* The name of the container.
|
||||
*/
|
||||
get containerName() {
|
||||
return this._containerName;
|
||||
this.containerContext = this.storageClientContext.container;
|
||||
}
|
||||
/**
|
||||
* Creates a new container under the specified account. If the container with
|
||||
|
|
@ -102,22 +102,9 @@ export class ContainerClient extends StorageClient {
|
|||
* ```
|
||||
*/
|
||||
async create(options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-create", options);
|
||||
try {
|
||||
// Spread operator in destructuring assignments,
|
||||
// this will filter out unwanted properties from the response object into result object
|
||||
return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.containerContext.create(updatedOptions));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Creates a new container under the specified account. If the container with
|
||||
|
|
@ -128,29 +115,21 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options -
|
||||
*/
|
||||
async createIfNotExists(options = {}) {
|
||||
var _a, _b;
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options);
|
||||
try {
|
||||
const res = await this.create(updatedOptions);
|
||||
return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
|
||||
}
|
||||
catch (e) {
|
||||
if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: "Expected exception when creating a container only if it does not already exist.",
|
||||
});
|
||||
return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
|
||||
return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => {
|
||||
var _a, _b;
|
||||
try {
|
||||
const res = await this.create(updatedOptions);
|
||||
return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
|
||||
}
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
catch (e) {
|
||||
if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") {
|
||||
return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns true if the Azure container resource represented by this client exists; false otherwise.
|
||||
|
|
@ -162,31 +141,21 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options -
|
||||
*/
|
||||
async exists(options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-exists", options);
|
||||
try {
|
||||
await this.getProperties({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
catch (e) {
|
||||
if (e.statusCode === 404) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: "Expected exception when checking container existence",
|
||||
return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => {
|
||||
try {
|
||||
await this.getProperties({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
});
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
catch (e) {
|
||||
if (e.statusCode === 404) {
|
||||
return false;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
}
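The rewritten `exists()` keeps the same probe-and-catch-404 approach, just inside `withSpan`. The same pattern written against the public client is shown below; in application code the built-in `containerClient.exists()` already wraps this, so the helper is only illustrative:

```javascript
// Returns true when the container answers a properties call, false on 404.
async function containerExists(containerClient) {
  try {
    await containerClient.getProperties();
    return true;
  } catch (e) {
    if (e.statusCode === 404) {
      return false;
    }
    throw e; // network/auth failures should still surface
  }
}
```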
|
||||
/**
|
||||
* Creates a {@link BlobClient}
|
||||
|
|
@ -247,20 +216,9 @@ export class ContainerClient extends StorageClient {
|
|||
if (!options.conditions) {
|
||||
options.conditions = {};
|
||||
}
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options);
|
||||
try {
|
||||
return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions })));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Marks the specified container for deletion. The container and any blobs
|
||||
|
|
@ -273,20 +231,14 @@ export class ContainerClient extends StorageClient {
|
|||
if (!options.conditions) {
|
||||
options.conditions = {};
|
||||
}
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-delete", options);
|
||||
try {
|
||||
return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.containerContext.delete({
|
||||
abortSignal: options.abortSignal,
|
||||
leaseAccessConditions: options.conditions,
|
||||
modifiedAccessConditions: options.conditions,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Marks the specified container for deletion if it exists. The container and any blobs
|
||||
|
|
@ -296,29 +248,19 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options - Options to Container Delete operation.
|
||||
*/
|
||||
async deleteIfExists(options = {}) {
|
||||
var _a, _b;
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options);
|
||||
try {
|
||||
const res = await this.delete(updatedOptions);
|
||||
return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
|
||||
}
|
||||
catch (e) {
|
||||
if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: "Expected exception when deleting a container only if it exists.",
|
||||
});
|
||||
return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
|
||||
return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => {
|
||||
var _a, _b;
|
||||
try {
|
||||
const res = await this.delete(updatedOptions);
|
||||
return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
|
||||
}
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
catch (e) {
|
||||
if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") {
|
||||
return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
}
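`createIfNotExists` and `deleteIfExists` both turn one specific service error code into a non-throwing result instead of recording it on a span. A sketch of the same idea for deletion (the helper name is ours, not the SDK's):

```javascript
// Treats "ContainerNotFound" as a clean no-op; every other error still throws.
async function deleteContainerIfExists(containerClient) {
  try {
    await containerClient.delete();
    return { succeeded: true };
  } catch (e) {
    if (e.details?.errorCode === "ContainerNotFound") {
      return { succeeded: false };
    }
    throw e;
  }
}
```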
|
||||
/**
|
||||
* Sets one or more user-defined name-value pairs for the specified container.
|
||||
|
|
@ -339,20 +281,15 @@ export class ContainerClient extends StorageClient {
|
|||
if (options.conditions.ifUnmodifiedSince) {
|
||||
throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service");
|
||||
}
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options);
|
||||
try {
|
||||
return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.containerContext.setMetadata({
|
||||
abortSignal: options.abortSignal,
|
||||
leaseAccessConditions: options.conditions,
|
||||
metadata,
|
||||
modifiedAccessConditions: options.conditions,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Gets the permissions for the specified container. The permissions indicate
|
||||
|
|
@ -369,9 +306,12 @@ export class ContainerClient extends StorageClient {
|
|||
if (!options.conditions) {
|
||||
options.conditions = {};
|
||||
}
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options);
|
||||
try {
|
||||
const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => {
|
||||
const response = assertResponse(await this.containerContext.getAccessPolicy({
|
||||
abortSignal: options.abortSignal,
|
||||
leaseAccessConditions: options.conditions,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
const res = {
|
||||
_response: response._response,
|
||||
blobPublicAccess: response.blobPublicAccess,
|
||||
|
|
@ -403,17 +343,7 @@ export class ContainerClient extends StorageClient {
|
|||
});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Sets the permissions for the specified container. The permissions indicate
|
||||
|
|
@ -434,8 +364,7 @@ export class ContainerClient extends StorageClient {
|
|||
*/
|
||||
async setAccessPolicy(access, containerAcl, options = {}) {
|
||||
options.conditions = options.conditions || {};
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options);
|
||||
try {
|
||||
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
|
||||
const acl = [];
|
||||
for (const identifier of containerAcl || []) {
|
||||
acl.push({
|
||||
|
|
@ -451,18 +380,15 @@ export class ContainerClient extends StorageClient {
|
|||
id: identifier.id,
|
||||
});
|
||||
}
|
||||
return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return assertResponse(await this.containerContext.setAccessPolicy({
|
||||
abortSignal: options.abortSignal,
|
||||
access,
|
||||
containerAcl: acl,
|
||||
leaseAccessConditions: options.conditions,
|
||||
modifiedAccessConditions: options.conditions,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Get a {@link BlobLeaseClient} that manages leases on the container.
|
||||
|
|
@ -496,25 +422,14 @@ export class ContainerClient extends StorageClient {
|
|||
* @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
|
||||
*/
|
||||
async uploadBlockBlob(blobName, body, contentLength, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options);
|
||||
try {
|
||||
return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => {
|
||||
const blockBlobClient = this.getBlockBlobClient(blobName);
|
||||
const response = await blockBlobClient.upload(body, contentLength, updatedOptions);
|
||||
return {
|
||||
blockBlobClient,
|
||||
response,
|
||||
};
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Marks the specified blob or snapshot for deletion. The blob is later deleted
|
||||
|
|
@ -528,24 +443,13 @@ export class ContainerClient extends StorageClient {
|
|||
* @returns Block blob deletion response data.
|
||||
*/
|
||||
async deleteBlob(blobName, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options);
|
||||
try {
|
||||
return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => {
|
||||
let blobClient = this.getBlobClient(blobName);
|
||||
if (options.versionId) {
|
||||
blobClient = blobClient.withVersion(options.versionId);
|
||||
}
|
||||
return await blobClient.delete(updatedOptions);
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
return blobClient.delete(updatedOptions);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* listBlobFlatSegment returns a single segment of blobs starting from the
|
||||
|
|
@ -558,25 +462,14 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options - Options to Container List Blob Flat Segment operation.
|
||||
*/
|
||||
async listBlobFlatSegment(marker, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options);
|
||||
try {
|
||||
const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => {
|
||||
const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
|
||||
return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => {
|
||||
const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
|
||||
const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
|
||||
return blobItem;
|
||||
}) }) });
|
||||
return wrappedResponse;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* listBlobHierarchySegment returns a single segment of blobs starting from
|
||||
|
|
@ -590,31 +483,18 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options - Options to Container List Blob Hierarchy Segment operation.
|
||||
*/
|
||||
async listBlobHierarchySegment(delimiter, marker, options = {}) {
|
||||
var _a;
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options);
|
||||
try {
|
||||
const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => {
|
||||
const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
|
||||
return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => {
|
||||
var _a;
|
||||
const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
|
||||
const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
|
||||
return blobItem;
|
||||
}), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => {
|
||||
const blobPrefix = {
|
||||
name: BlobNameToString(blobPrefixInternal.name),
|
||||
};
|
||||
const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) });
|
||||
return blobPrefix;
|
||||
}) }) });
|
||||
return wrappedResponse;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
|
||||
|
|
@ -628,8 +508,8 @@ export class ContainerClient extends StorageClient {
|
|||
* items. The marker value is opaque to the client.
|
||||
* @param options - Options to list blobs operation.
|
||||
*/
|
||||
listSegments(marker, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listSegments_1() {
|
||||
listSegments(marker_1) {
|
||||
return __asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) {
|
||||
let listBlobsFlatSegmentResponse;
|
||||
if (!!marker || marker === undefined) {
|
||||
do {
|
||||
|
|
@ -645,20 +525,22 @@ export class ContainerClient extends StorageClient {
|
|||
*
|
||||
* @param options - Options to list blobs operation.
|
||||
*/
|
||||
listItems(options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listItems_1() {
|
||||
var e_1, _a;
|
||||
listItems() {
|
||||
return __asyncGenerator(this, arguments, function* listItems_1(options = {}) {
|
||||
var _a, e_1, _b, _c;
|
||||
let marker;
|
||||
try {
|
||||
for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
|
||||
const listBlobsFlatSegmentResponse = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(this.listSegments(marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const listBlobsFlatSegmentResponse = _c;
|
||||
yield __await(yield* __asyncDelegator(__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)));
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
|
||||
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
|
|
@ -806,8 +688,8 @@ export class ContainerClient extends StorageClient {
|
|||
* items. The marker value is opaque to the client.
|
||||
* @param options - Options to list blobs operation.
|
||||
*/
|
||||
listHierarchySegments(delimiter, marker, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listHierarchySegments_1() {
|
||||
listHierarchySegments(delimiter_1, marker_1) {
|
||||
return __asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) {
|
||||
let listBlobsHierarchySegmentResponse;
|
||||
if (!!marker || marker === undefined) {
|
||||
do {
|
||||
|
|
@ -824,13 +706,15 @@ export class ContainerClient extends StorageClient {
|
|||
* @param delimiter - The character or string used to define the virtual hierarchy
|
||||
* @param options - Options to list blobs operation.
|
||||
*/
|
||||
listItemsByHierarchy(delimiter, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1() {
|
||||
var e_2, _a;
|
||||
listItemsByHierarchy(delimiter_1) {
|
||||
return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) {
|
||||
var _a, e_2, _b, _c;
|
||||
let marker;
|
||||
try {
|
||||
for (var _b = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
|
||||
const listBlobsHierarchySegmentResponse = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const listBlobsHierarchySegmentResponse = _c;
|
||||
const segment = listBlobsHierarchySegmentResponse.segment;
|
||||
if (segment.blobPrefixes) {
|
||||
for (const prefix of segment.blobPrefixes) {
|
||||
|
|
@ -845,7 +729,7 @@ export class ContainerClient extends StorageClient {
|
|||
catch (e_2_1) { e_2 = { error: e_2_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
|
||||
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
||||
}
|
||||
finally { if (e_2) throw e_2.error; }
|
||||
}
|
||||
|
|
@ -1009,9 +893,14 @@ export class ContainerClient extends StorageClient {
|
|||
* @param options - Options to find blobs by tags.
|
||||
*/
|
||||
async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
|
||||
const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options);
|
||||
try {
|
||||
const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)));
|
||||
return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
|
||||
const response = assertResponse(await this.containerContext.filterBlobs({
|
||||
abortSignal: options.abortSignal,
|
||||
where: tagFilterSqlExpression,
|
||||
marker,
|
||||
maxPageSize: options.maxPageSize,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => {
|
||||
var _a;
|
||||
let tagValue = "";
|
||||
|
|
@ -1021,17 +910,7 @@ export class ContainerClient extends StorageClient {
|
|||
return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue });
|
||||
}) });
|
||||
return wrappedResponse;
|
||||
}
|
||||
catch (e) {
|
||||
span.setStatus({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: e.message,
|
||||
});
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
span.end();
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.
|
||||
|
|
@ -1049,8 +928,8 @@ export class ContainerClient extends StorageClient {
|
|||
* items. The marker value is opaque to the client.
|
||||
* @param options - Options to find blobs by tags.
|
||||
*/
|
||||
findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() {
|
||||
findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) {
|
||||
let response;
|
||||
if (!!marker || marker === undefined) {
|
||||
do {
|
||||
|
|
@ -1071,20 +950,22 @@ export class ContainerClient extends StorageClient {
|
|||
* however, only a subset of the OData filter syntax is supported in the Blob service.
|
||||
* @param options - Options to findBlobsByTagsItems.
|
||||
*/
|
||||
findBlobsByTagsItems(tagFilterSqlExpression, options = {}) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() {
|
||||
var e_3, _a;
|
||||
findBlobsByTagsItems(tagFilterSqlExpression_1) {
|
||||
return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) {
|
||||
var _a, e_3, _b, _c;
|
||||
let marker;
|
||||
try {
|
||||
for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) {
|
||||
const segment = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const segment = _c;
|
||||
yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs)));
|
||||
}
|
||||
}
|
||||
catch (e_3_1) { e_3 = { error: e_3_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b));
|
||||
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
||||
}
|
||||
finally { if (e_3) throw e_3.error; }
|
||||
}
|
||||
|
|
@ -1192,6 +1073,24 @@ export class ContainerClient extends StorageClient {
|
|||
},
|
||||
};
|
||||
}
|
||||
/**
|
||||
* The Get Account Information operation returns the sku name and account kind
|
||||
* for the specified account.
|
||||
* The Get Account Information operation is available on service versions beginning
|
||||
* with version 2018-03-28.
|
||||
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
|
||||
*
|
||||
* @param options - Options to the Service Get Account Info operation.
|
||||
* @returns Response data for the Service Get Account Info operation.
|
||||
*/
|
||||
async getAccountInfo(options = {}) {
|
||||
return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => {
|
||||
return assertResponse(await this.containerContext.getAccountInfo({
|
||||
abortSignal: options.abortSignal,
|
||||
tracingOptions: updatedOptions.tracingOptions,
|
||||
}));
|
||||
});
|
||||
}
|
||||
getContainerNameFromUrl() {
let containerName;
try {
@@ -1200,23 +1099,23 @@ export class ContainerClient extends StorageClient {
// "https://myaccount.blob.core.windows.net/mycontainer";
// IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`
// http://localhost:10001/devstoreaccount1/containername
const parsedUrl = URLBuilder.parse(this.url);
if (parsedUrl.getHost().split(".")[1] === "blob") {
const parsedUrl = new URL(this.url);
if (parsedUrl.hostname.split(".")[1] === "blob") {
// "https://myaccount.blob.core.windows.net/containername".
// "https://customdomain.com/containername".
// .getPath() -> /containername
containerName = parsedUrl.getPath().split("/")[1];
containerName = parsedUrl.pathname.split("/")[1];
}
else if (isIpEndpointStyle(parsedUrl)) {
// IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername
// Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername
// .getPath() -> /devstoreaccount1/containername
containerName = parsedUrl.getPath().split("/")[2];
containerName = parsedUrl.pathname.split("/")[2];
}
else {
// "https://customdomain.com/containername".
// .getPath() -> /containername
containerName = parsedUrl.getPath().split("/")[1];
containerName = parsedUrl.pathname.split("/")[1];
}
// decode the encoded containerName - to get all the special characters that might be present in it
containerName = decodeURIComponent(containerName);
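Here `core-http`'s `URLBuilder` gives way to the standard WHATWG `URL`: `getHost()` becomes `hostname` and `getPath()` becomes `pathname`. A standalone sketch of the same parsing rules, with sample URLs only and the IP-endpoint detection reduced to a boolean flag (the real code uses `isIpEndpointStyle`):

```javascript
// Picks the path segment that names the container.
function containerNameFromUrl(url, isIpStyleEndpoint = false) {
  const parsed = new URL(url);
  const segments = parsed.pathname.split("/");
  // IP / emulator style: /devstoreaccount1/containername -> index 2,
  // everything else:      /containername                 -> index 1.
  const name = isIpStyleEndpoint ? segments[2] : segments[1];
  return decodeURIComponent(name);
}

console.log(containerNameFromUrl("https://myaccount.blob.core.windows.net/my%20container"));
// -> "my container"
console.log(containerNameFromUrl("http://127.0.0.1:10000/devstoreaccount1/photos", true));
// -> "photos"
```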
@@ -1249,6 +1148,24 @@ export class ContainerClient extends StorageClient {
resolve(appendToURLQuery(this.url, sas));
});
}
/**
* Only available for ContainerClient constructed with a shared key credential.
*
* Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI
* based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
*/
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
generateSasStringToSign(options) {
if (!(this.credential instanceof StorageSharedKeyCredential)) {
throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential");
}
return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign;
}
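`generateSasStringToSign` exposes the exact string-to-sign for debugging; applications normally stay one level up with `generateSasUrl`. A short usage sketch, with the permissions and expiry chosen only as examples:

```javascript
import { ContainerSASPermissions } from "@azure/storage-blob";

// Requires a ContainerClient constructed with a StorageSharedKeyCredential.
async function readOnlyContainerSasUrl(containerClient) {
  return containerClient.generateSasUrl({
    permissions: ContainerSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + 15 * 60 * 1000), // 15 minutes
  });
}
```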
/**
* Creates a BlobBatchClient object to conduct batch operations.
*
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map
generated
vendored
File diff suppressed because one or more lines are too long
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js
generated
vendored
@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
/**
* Function that converts PageRange and ClearRange to a common Range object.
* PageRange and ClearRange have start and end while Range offset and count
@@ -1 +1 @@
{"version":3,"file":"PageBlobRangeResponse.js","sourceRoot":"","sources":["../../../src/PageBlobRangeResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA8ElC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,QAAqF;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,uCACK,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,OAEH;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n"]}
{"version":3,"file":"PageBlobRangeResponse.js","sourceRoot":"","sources":["../../../src/PageBlobRangeResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA0ClC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,QAAqF;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,uCACK,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,OAEH;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\nimport { ResponseWithBody } from \"./utils/utils.common\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse\n extends PageList,\n PageBlobGetPageRangesHeaders,\n ResponseWithBody<PageBlobGetPageRangesHeaders, PageList> {}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders,\n ResponseWithBody<PageBlobGetPageRangesDiffHeaders, PageList> {}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel,\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n"]}
236
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js
generated
vendored
@ -1,18 +1,25 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { BaseRequestPolicy, deserializationPolicy, disableResponseDecompressionPolicy, HttpHeaders, RequestPolicyOptions, WebResource, proxyPolicy, isNode, isTokenCredential, tracingPolicy, logPolicy, keepAlivePolicy, generateClientRequestIdPolicy, } from "@azure/core-http";
// Licensed under the MIT License.
import { __rest } from "tslib";
import { convertHttpClient, createRequestPolicyFactoryPolicy, } from "@azure/core-http-compat";
import { bearerTokenAuthenticationPolicy, decompressResponsePolicyName, } from "@azure/core-rest-pipeline";
import { authorizeRequestOnTenantChallenge, createClientPipeline } from "@azure/core-client";
import { parseXML, stringifyXML } from "@azure/core-xml";
import { isTokenCredential } from "@azure/core-auth";
import { logger } from "./log";
import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory";
import { StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, } from "./utils/constants";
import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory";
import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, SDK_VERSION, } from "./utils/constants";
import { getCachedDefaultHttpClient } from "./utils/cache";
import { attachCredential } from "./utils/utils.common";
import { storageBearerTokenChallengeAuthenticationPolicy } from "./policies/StorageBearerTokenChallengeAuthenticationPolicy";
import { storageBrowserPolicy } from "./policies/StorageBrowserPolicyV2";
import { storageRetryPolicy } from "./policies/StorageRetryPolicyV2";
import { storageSharedKeyCredentialPolicy } from "./policies/StorageSharedKeyCredentialPolicyV2";
import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory";
import { storageCorrectContentLengthPolicy } from "./policies/StorageCorrectContentLengthPolicy";
// Export following interfaces and types for customers who want to implement their
// own RequestPolicy or HTTPClient
export { BaseRequestPolicy, StorageOAuthScopes, deserializationPolicy, HttpHeaders, WebResource, RequestPolicyOptions, };
export { StorageOAuthScopes, };
/**
* A helper to decide if a given argument satisfies the Pipeline contract
* @param pipeline - An argument that may be a Pipeline
@ -44,9 +51,7 @@ export class Pipeline {
*/
constructor(factories, options = {}) {
this.factories = factories;
// when options.httpClient is not specified, passing in a DefaultHttpClient instance to
// avoid each client creating its own http client.
this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() });
this.options = options;
}
/**
* Transfer Pipeline object to ServiceClientOptions object which is required by
@ -69,39 +74,188 @@ export class Pipeline {
* @returns A new Pipeline object.
*/
export function newPipeline(credential, pipelineOptions = {}) {
var _a;
if (credential === undefined) {
if (!credential) {
credential = new AnonymousCredential();
}
// Order is important. Closer to the API at the top & closer to the network at the bottom.
// The credential's policy factory must appear close to the wire so it can sign any
// changes made by other factories (like UniqueRequestIDPolicyFactory)
const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);
const factories = [
tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),
keepAlivePolicy(pipelineOptions.keepAliveOptions),
telemetryPolicy,
generateClientRequestIdPolicy(),
new StorageBrowserPolicyFactory(),
new StorageRetryPolicyFactory(pipelineOptions.retryOptions),
// Default deserializationPolicy is provided by protocol layer
// Use customized XML char key of "#" so we could deserialize metadata
// with "_" key
deserializationPolicy(undefined, { xmlCharKey: "#" }),
logPolicy({
logger: logger.info,
allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,
allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,
}),
const pipeline = new Pipeline([], pipelineOptions);
pipeline._credential = credential;
return pipeline;
}
function processDownlevelPipeline(pipeline) {
const knownFactoryFunctions = [
isAnonymousCredential,
isStorageSharedKeyCredential,
isCoreHttpBearerTokenFactory,
isStorageBrowserPolicyFactory,
isStorageRetryPolicyFactory,
isStorageTelemetryPolicyFactory,
isCoreHttpPolicyFactory,
];
if (isNode) {
// policies only available in Node.js runtime, not in browsers
factories.push(proxyPolicy(pipelineOptions.proxyOptions));
factories.push(disableResponseDecompressionPolicy());
if (pipeline.factories.length) {
const novelFactories = pipeline.factories.filter((factory) => {
return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory));
});
if (novelFactories.length) {
const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory));
// if there are any left over, wrap in a requestPolicyFactoryPolicy
return {
wrappedPolicies: createRequestPolicyFactoryPolicy(novelFactories),
afterRetry: hasInjector,
};
}
}
factories.push(isTokenCredential(credential)
? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential)
: credential);
return new Pipeline(factories, pipelineOptions);
return undefined;
}
export function getCoreClientOptions(pipeline) {
var _a;
const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = __rest(_b, ["httpClient"]);
let httpClient = pipeline._coreHttpClient;
if (!httpClient) {
httpClient = v1Client ? convertHttpClient(v1Client) : getCachedDefaultHttpClient();
pipeline._coreHttpClient = httpClient;
}
let corePipeline = pipeline._corePipeline;
if (!corePipeline) {
const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`;
const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix
? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}`
: `${packageDetails}`;
corePipeline = createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: {
additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,
additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,
logger: logger.info,
}, userAgentOptions: {
userAgentPrefix,
}, serializationOptions: {
stringifyXML,
serializerOptions: {
xml: {
// Use customized XML char key of "#" so we can deserialize metadata
// with "_" key
xmlCharKey: "#",
},
},
}, deserializationOptions: {
parseXML,
serializerOptions: {
xml: {
// Use customized XML char key of "#" so we can deserialize metadata
// with "_" key
xmlCharKey: "#",
},
},
} }));
corePipeline.removePolicy({ phase: "Retry" });
corePipeline.removePolicy({ name: decompressResponsePolicyName });
corePipeline.addPolicy(storageCorrectContentLengthPolicy());
corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" });
corePipeline.addPolicy(storageBrowserPolicy());
const downlevelResults = processDownlevelPipeline(pipeline);
if (downlevelResults) {
corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined);
}
const credential = getCredentialFromPipeline(pipeline);
if (isTokenCredential(credential)) {
corePipeline.addPolicy(bearerTokenAuthenticationPolicy({
credential,
scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes,
challengeCallbacks: { authorizeRequestOnChallenge: authorizeRequestOnTenantChallenge },
}), { phase: "Sign" });
}
else if (credential instanceof StorageSharedKeyCredential) {
corePipeline.addPolicy(storageSharedKeyCredentialPolicy({
accountName: credential.accountName,
accountKey: credential.accountKey,
}), { phase: "Sign" });
}
pipeline._corePipeline = corePipeline;
}
return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline });
}
export function getCredentialFromPipeline(pipeline) {
// see if we squirreled one away on the type itself
if (pipeline._credential) {
return pipeline._credential;
}
// if it came from another package, loop over the factories and look for one like before
let credential = new AnonymousCredential();
for (const factory of pipeline.factories) {
if (isTokenCredential(factory.credential)) {
// Only works if the factory has been attached a "credential" property.
// We do that in newPipeline() when using TokenCredential.
credential = factory.credential;
}
else if (isStorageSharedKeyCredential(factory)) {
return factory;
}
}
return credential;
}
function isStorageSharedKeyCredential(factory) {
if (factory instanceof StorageSharedKeyCredential) {
return true;
}
return factory.constructor.name === "StorageSharedKeyCredential";
}
function isAnonymousCredential(factory) {
if (factory instanceof AnonymousCredential) {
return true;
}
return factory.constructor.name === "AnonymousCredential";
}
function isCoreHttpBearerTokenFactory(factory) {
return isTokenCredential(factory.credential);
}
function isStorageBrowserPolicyFactory(factory) {
if (factory instanceof StorageBrowserPolicyFactory) {
return true;
}
return factory.constructor.name === "StorageBrowserPolicyFactory";
}
function isStorageRetryPolicyFactory(factory) {
if (factory instanceof StorageRetryPolicyFactory) {
return true;
}
return factory.constructor.name === "StorageRetryPolicyFactory";
}
function isStorageTelemetryPolicyFactory(factory) {
return factory.constructor.name === "TelemetryPolicyFactory";
}
function isInjectorPolicyFactory(factory) {
return factory.constructor.name === "InjectorPolicyFactory";
}
function isCoreHttpPolicyFactory(factory) {
const knownPolicies = [
"GenerateClientRequestIdPolicy",
"TracingPolicy",
"LogPolicy",
"ProxyPolicy",
"DisableResponseDecompressionPolicy",
"KeepAlivePolicy",
"DeserializationPolicy",
];
const mockHttpClient = {
sendRequest: async (request) => {
return {
request,
headers: request.headers.clone(),
status: 500,
};
},
};
const mockRequestPolicyOptions = {
log(_logLevel, _message) {
/* do nothing */
},
shouldLog(_logLevel) {
return false;
},
};
const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions);
const policyName = policyInstance.constructor.name;
// bundlers sometimes add a custom suffix to the class name to make it unique
return knownPolicies.some((knownPolicyName) => {
return policyName.startsWith(knownPolicyName);
});
}
//# sourceMappingURL=Pipeline.js.map
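For orientation (not part of the upstream diff): the rewritten newPipeline() above now only records the credential, and getCoreClientOptions() builds the real @azure/core-rest-pipeline when a client is constructed from it. A minimal sketch, in TypeScript, of exercising that path through the public API; the account name, key, and prefix below are placeholders:

import { BlobServiceClient, newPipeline, StorageSharedKeyCredential } from "@azure/storage-blob";

// Placeholder credentials; real values come from your storage account.
const credential = new StorageSharedKeyCredential("myaccount", "<base64-account-key>");

// The returned object is still the legacy-shaped Pipeline; conversion to a
// core-rest-pipeline happens inside getCoreClientOptions() when a client is built.
const pipeline = newPipeline(credential, {
  retryOptions: { maxTries: 4 },
  userAgentOptions: { userAgentPrefix: "my-app/1.0" },
});

const serviceClient = new BlobServiceClient("https://myaccount.blob.core.windows.net", pipeline);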
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map
generated
vendored
File diff suppressed because one or more lines are too long
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js
generated
vendored
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
/**
* Generate a range string. For example:
*
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map
generated
vendored
@ -1 +1 @@
{"version":3,"file":"Range.js","sourceRoot":"","sources":["../../../src/Range.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,MAAa;IACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;QACrB,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC;KAChE;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;QACrC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;KACH;IACD,OAAO,MAAM,CAAC,KAAK;QACjB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE;QAC9D,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n"]}
{"version":3,"file":"Range.js","sourceRoot":"","sources":["../../../src/Range.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,MAAa;IACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtB,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC;IACjE,CAAC;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE,CAAC;QACtC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;IACJ,CAAC;IACD,OAAO,MAAM,CAAC,KAAK;QACjB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE;QAC9D,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`,\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n"]}
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { StorageBrowserPolicy } from "./policies/StorageBrowserPolicy";
export { StorageBrowserPolicy };
/**
@ -1 +1 @@
{"version":3,"file":"StorageBrowserPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageBrowserPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,oBAAoB,EAAE,CAAC;AAEhC;;GAEG;AACH,MAAM,OAAO,2BAA2B;IACtC;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n"]}
{"version":3,"file":"StorageBrowserPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageBrowserPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,oBAAoB,EAAE,CAAC;AAEhC;;GAEG;AACH,MAAM,OAAO,2BAA2B;IACtC;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport {\n RequestPolicy,\n RequestPolicyOptionsLike as RequestPolicyOptions,\n RequestPolicyFactory,\n} from \"@azure/core-http-compat\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n"]}
23
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js
generated
vendored
@ -1,10 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { StorageClientContext } from "./generated/src/storageClientContext";
// Licensed under the MIT License.
import { StorageContextClient } from "./StorageContextClient";
import { getCoreClientOptions, getCredentialFromPipeline } from "./Pipeline";
import { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from "./utils/utils.common";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential";
import { isTokenCredential, isNode } from "@azure/core-http";
/**
* A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}
* and etc.
@ -20,20 +18,9 @@ export class StorageClient {
this.url = escapeURLPath(url);
this.accountName = getAccountNameFromUrl(url);
this.pipeline = pipeline;
this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions());
this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline));
this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
this.credential = new AnonymousCredential();
for (const factory of this.pipeline.factories) {
if ((isNode && factory instanceof StorageSharedKeyCredential) ||
factory instanceof AnonymousCredential) {
this.credential = factory;
}
else if (isTokenCredential(factory.credential)) {
// Only works if the factory has been attached a "credential" property.
// We do that in newPipeline() when using TokenCredential.
this.credential = factory.credential;
}
}
this.credential = getCredentialFromPipeline(pipeline);
// Override protocol layer's default content-type
const storageClientContext = this.storageClientContext;
storageClientContext.requestContentType = undefined;
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map
generated
vendored
@ -1 +1 @@
{"version":3,"file":"StorageClient.js","sourceRoot":"","sources":["../../../src/StorageClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAE5E,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAClG,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAmB,iBAAiB,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAa9E;;;GAGG;AACH,MAAM,OAAgB,aAAa;IAyBjC;;;;OAIG;IACH,YAAsB,GAAW,EAAE,QAAsB;QACvD,iFAAiF;QACjF,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;QAE7D,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;YAC7C,IACE,CAAC,MAAM,IAAI,OAAO,YAAY,0BAA0B,CAAC;gBACzD,OAAO,YAAY,mBAAmB,EACtC;gBACA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;aAC3B;iBAAM,IAAI,iBAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;gBACzD,uEAAuE;gBACvE,0DAA0D;gBAC1D,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;aAC/C;SACF;QAED,iDAAiD;QACjD,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC9D,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n"]}
{"version":3,"file":"StorageClient.js","sourceRoot":"","sources":["../../../src/StorageClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,oBAAoB,EAAE,yBAAyB,EAAgB,MAAM,YAAY,CAAC;AAC3F,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAgBlG;;;GAGG;AACH,MAAM,OAAgB,aAAa;IAyBjC;;;;OAIG;IACH,YAAsB,GAAW,EAAE,QAAsB;QACvD,iFAAiF;QACjF,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,IAAI,CAAC,GAAG,EAAE,oBAAoB,CAAC,QAAQ,CAAC,CAAC,CAAC;QAE/F,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;QAE7D,IAAI,CAAC,UAAU,GAAG,yBAAyB,CAAC,QAAQ,CAAC,CAAC;QAEtD,iDAAiD;QACjD,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC9D,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { StorageClient as StorageClientContext } from \"./generated/src/\";\nimport { StorageContextClient } from \"./StorageContextClient\";\nimport { getCoreClientOptions, getCredentialFromPipeline, PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline));\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = getCredentialFromPipeline(pipeline);\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n"]}
17
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageContextClient.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { StorageClient } from "./generated/src";
/**
* @internal
*/
export class StorageContextClient extends StorageClient {
async sendOperationRequest(operationArguments, operationSpec) {
const operationSpecToSend = Object.assign({}, operationSpec);
if (operationSpecToSend.path === "/{containerName}" ||
operationSpecToSend.path === "/{containerName}/{blob}") {
operationSpecToSend.path = "";
}
return super.sendOperationRequest(operationArguments, operationSpecToSend);
}
}
//# sourceMappingURL=StorageContextClient.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageContextClient.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"StorageContextClient.js","sourceRoot":"","sources":["../../../src/StorageContextClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAEhD;;GAEG;AACH,MAAM,OAAO,oBAAqB,SAAQ,aAAa;IACrD,KAAK,CAAC,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B;QAE5B,MAAM,mBAAmB,qBAAQ,aAAa,CAAE,CAAC;QAEjD,IACE,mBAAmB,CAAC,IAAI,KAAK,kBAAkB;YAC/C,mBAAmB,CAAC,IAAI,KAAK,yBAAyB,EACtD,CAAC;YACD,mBAAmB,CAAC,IAAI,GAAG,EAAE,CAAC;QAChC,CAAC;QACD,OAAO,KAAK,CAAC,oBAAoB,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;IAC7E,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { OperationArguments, OperationSpec } from \"@azure/core-client\";\nimport { StorageClient } from \"./generated/src\";\n\n/**\n * @internal\n */\nexport class StorageContextClient extends StorageClient {\n async sendOperationRequest<T>(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n ): Promise<T> {\n const operationSpecToSend = { ...operationSpec };\n\n if (\n operationSpecToSend.path === \"/{containerName}\" ||\n operationSpecToSend.path === \"/{containerName}/{blob}\"\n ) {\n operationSpecToSend.path = \"\";\n }\n return super.sendOperationRequest(operationArguments, operationSpecToSend);\n }\n}\n"]}
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { StorageRetryPolicy, StorageRetryPolicyType } from "./policies/StorageRetryPolicy";
export { StorageRetryPolicyType, StorageRetryPolicy };
/**
@ -1 +1 @@
{"version":3,"file":"StorageRetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageRetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAE3F,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,CAAC;AAmDtD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IAGpC;;;OAGG;IACH,YAAY,YAAkC;QAC5C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n"]}
{"version":3,"file":"StorageRetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageRetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAE3F,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,CAAC;AAmDtD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IAGpC;;;OAGG;IACH,YAAY,YAAkC;QAC5C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport {\n RequestPolicy,\n RequestPolicyOptionsLike as RequestPolicyOptions,\n RequestPolicyFactory,\n} from \"@azure/core-http-compat\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n"]}
50
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js
generated
vendored
@ -1,50 +0,0 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { isNode, } from "@azure/core-http";
import * as os from "os";
import { TelemetryPolicy } from "./policies/TelemetryPolicy";
import { SDK_VERSION } from "./utils/constants";
/**
* TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.
*/
export class TelemetryPolicyFactory {
/**
* Creates an instance of TelemetryPolicyFactory.
* @param telemetry -
*/
constructor(telemetry) {
const userAgentInfo = [];
if (isNode) {
if (telemetry) {
const telemetryString = telemetry.userAgentPrefix || "";
if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {
userAgentInfo.push(telemetryString);
}
}
// e.g. azsdk-js-storageblob/10.0.0
const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;
if (userAgentInfo.indexOf(libInfo) === -1) {
userAgentInfo.push(libInfo);
}
// e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)
let runtimeInfo = `(NODE-VERSION ${process.version})`;
if (os) {
runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;
}
if (userAgentInfo.indexOf(runtimeInfo) === -1) {
userAgentInfo.push(runtimeInfo);
}
}
this.telemetryString = userAgentInfo.join(" ");
}
/**
* Creates a TelemetryPolicy object.
*
* @param nextPolicy -
* @param options -
*/
create(nextPolicy, options) {
return new TelemetryPolicy(nextPolicy, options, this.telemetryString);
}
}
//# sourceMappingURL=TelemetryPolicyFactory.js.map
@ -1 +0,0 @@
{"version":3,"file":"TelemetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/TelemetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,MAAM,GAKP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC7D,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD;;GAEG;AACH,MAAM,OAAO,sBAAsB;IAMjC;;;OAGG;IACH,YAAY,SAA4B;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;QAEnC,IAAI,MAAM,EAAE;YACV,IAAI,SAAS,EAAE;gBACb,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;gBACxD,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC/E,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;iBACrC;aACF;YAED,mCAAmC;YACnC,MAAM,OAAO,GAAG,wBAAwB,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;gBACzC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;aAC7B;YAED,mDAAmD;YACnD,IAAI,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;YACtD,IAAI,EAAE,EAAE;gBACN,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,KAAK,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG,CAAC;aACjF;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7C,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aACjC;SACF;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n"]}
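Note (not part of the upstream diff): with TelemetryPolicyFactory deleted, the user-agent string is assembled by getCoreClientOptions() from the standard userAgentOptions instead. A minimal sketch of the equivalent configuration through the public options; the URL and prefix are placeholders:

import { BlobServiceClient, AnonymousCredential } from "@azure/storage-blob";

// The prefix is placed ahead of the SDK's own azsdk-js-azure-storage-blob/<version> entry.
const client = new BlobServiceClient(
  "https://myaccount.blob.core.windows.net",
  new AnonymousCredential(),
  { userAgentOptions: { userAgentPrefix: "my-app/1.0" } },
);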
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { AnonymousCredentialPolicy } from "../policies/AnonymousCredentialPolicy";
import { Credential } from "./Credential";
/**
@ -1 +1 @@
{"version":3,"file":"AnonymousCredential.js","sourceRoot":"","sources":["../../../../src/credentials/AnonymousCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,yBAAyB,EAAE,MAAM,uCAAuC,CAAC;AAClF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;;GAKG;AACH,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n"]}
{"version":3,"file":"AnonymousCredential.js","sourceRoot":"","sources":["../../../../src/credentials/AnonymousCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,OAAO,EAAE,yBAAyB,EAAE,MAAM,uCAAuC,CAAC;AAClF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;;GAKG;AACH,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport {\n RequestPolicy,\n RequestPolicyOptionsLike as RequestPolicyOptions,\n} from \"@azure/core-http-compat\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n"]}
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js
generated
vendored
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
/**
* Credential is an abstract class for Azure Storage HTTP requests signing. This
* class will host an credentialPolicyCreator factory which generates CredentialPolicy.
@ -1 +1 @@
{"version":3,"file":"Credential.js","sourceRoot":"","sources":["../../../../src/credentials/Credential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;GAGG;AACH,MAAM,OAAgB,UAAU;IAC9B;;;;;OAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B;QACtE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;IACvE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n"]}
{"version":3,"file":"Credential.js","sourceRoot":"","sources":["../../../../src/credentials/Credential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AASlC;;;GAGG;AACH,MAAM,OAAgB,UAAU;IAC9B;;;;;OAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B;QACtE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;IACvE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike as RequestPolicyOptions,\n} from \"@azure/core-http-compat\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n) => CredentialPolicy;\n"]}
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
export class StorageSharedKeyCredential {
}
//# sourceMappingURL=StorageSharedKeyCredential.browser.js.map
@ -1 +1 @@
{"version":3,"file":"StorageSharedKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,0BAA0B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class StorageSharedKeyCredential {}\n"]}
{"version":3,"file":"StorageSharedKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,0BAA0B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nexport class StorageSharedKeyCredential {}\n"]}
@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { createHmac } from "crypto";
import { StorageSharedKeyCredentialPolicy } from "../policies/StorageSharedKeyCredentialPolicy";
import { Credential } from "./Credential";
@ -1 +1 @@
{"version":3,"file":"StorageSharedKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC,OAAO,EAAE,gCAAgC,EAAE,MAAM,8CAA8C,CAAC;AAChG,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;GAIG;AACH,MAAM,OAAO,0BAA2B,SAAQ,UAAU;IAWxD;;;;OAIG;IACH,YAAY,WAAmB,EAAE,UAAkB;QACjD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IACzE,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]}
{"version":3,"file":"StorageSharedKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAMpC,OAAO,EAAE,gCAAgC,EAAE,MAAM,8CAA8C,CAAC;AAChG,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;GAIG;AACH,MAAM,OAAO,0BAA2B,SAAQ,UAAU;IAWxD;;;;OAIG;IACH,YAAY,WAAmB,EAAE,UAAkB;QACjD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IACzE,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { createHmac } from \"crypto\";\nimport {\n RequestPolicy,\n RequestPolicyOptionsLike as RequestPolicyOptions,\n} from \"@azure/core-http-compat\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]}
@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
export class UserDelegationKeyCredential {
}
//# sourceMappingURL=UserDelegationKeyCredential.browser.js.map
@@ -1 +1 @@
{"version":3,"file":"UserDelegationKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,2BAA2B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class UserDelegationKeyCredential {}\n"]}
{"version":3,"file":"UserDelegationKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,2BAA2B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nexport class UserDelegationKeyCredential {}\n"]}
@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
import { createHmac } from "crypto";
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
@@ -1 +1 @@
{"version":3,"file":"UserDelegationKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;;;;GAKG;AACH,MAAM,OAAO,2BAA2B;IAgBtC;;;;OAIG;IACH,YAAY,WAAmB,EAAE,iBAAoC;QACnE,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAC5D,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,gEAAgE;QAEhE,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IACtF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]}
{"version":3,"file":"UserDelegationKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;;;;GAKG;AACH,MAAM,OAAO,2BAA2B;IAgBtC;;;;OAIG;IACH,YAAY,WAAmB,EAAE,iBAAoC;QACnE,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAC5D,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,gEAAgE;QAEhE,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IACtF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]}
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js
generated
vendored
@@ -7,5 +7,5 @@
*/
export * from "./models";
export { StorageClient } from "./storageClient";
export { StorageClientContext } from "./storageClientContext";
export * from "./operationsInterfaces";
//# sourceMappingURL=index.js.map
@@ -1 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/generated/src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./models\";\nexport { StorageClient } from \"./storageClient\";\nexport { StorageClientContext } from \"./storageClientContext\";\n"]}
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/generated/src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,cAAc,wBAAwB,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./models\";\nexport { StorageClient } from \"./storageClient\";\nexport * from \"./operationsInterfaces\";\n"]}
249
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js
generated
vendored
@@ -5,5 +5,252 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */
|
||||
export var KnownEncryptionAlgorithmType;
|
||||
(function (KnownEncryptionAlgorithmType) {
|
||||
/** AES256 */
|
||||
KnownEncryptionAlgorithmType["AES256"] = "AES256";
|
||||
})(KnownEncryptionAlgorithmType || (KnownEncryptionAlgorithmType = {}));
|
||||
/** Known values of {@link BlobExpiryOptions} that the service accepts. */
|
||||
export var KnownBlobExpiryOptions;
|
||||
(function (KnownBlobExpiryOptions) {
|
||||
/** NeverExpire */
|
||||
KnownBlobExpiryOptions["NeverExpire"] = "NeverExpire";
|
||||
/** RelativeToCreation */
|
||||
KnownBlobExpiryOptions["RelativeToCreation"] = "RelativeToCreation";
|
||||
/** RelativeToNow */
|
||||
KnownBlobExpiryOptions["RelativeToNow"] = "RelativeToNow";
|
||||
/** Absolute */
|
||||
KnownBlobExpiryOptions["Absolute"] = "Absolute";
|
||||
})(KnownBlobExpiryOptions || (KnownBlobExpiryOptions = {}));
|
||||
/** Known values of {@link StorageErrorCode} that the service accepts. */
|
||||
export var KnownStorageErrorCode;
|
||||
(function (KnownStorageErrorCode) {
|
||||
/** AccountAlreadyExists */
|
||||
KnownStorageErrorCode["AccountAlreadyExists"] = "AccountAlreadyExists";
|
||||
/** AccountBeingCreated */
|
||||
KnownStorageErrorCode["AccountBeingCreated"] = "AccountBeingCreated";
|
||||
/** AccountIsDisabled */
|
||||
KnownStorageErrorCode["AccountIsDisabled"] = "AccountIsDisabled";
|
||||
/** AuthenticationFailed */
|
||||
KnownStorageErrorCode["AuthenticationFailed"] = "AuthenticationFailed";
|
||||
/** AuthorizationFailure */
|
||||
KnownStorageErrorCode["AuthorizationFailure"] = "AuthorizationFailure";
|
||||
/** ConditionHeadersNotSupported */
|
||||
KnownStorageErrorCode["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported";
|
||||
/** ConditionNotMet */
|
||||
KnownStorageErrorCode["ConditionNotMet"] = "ConditionNotMet";
|
||||
/** EmptyMetadataKey */
|
||||
KnownStorageErrorCode["EmptyMetadataKey"] = "EmptyMetadataKey";
|
||||
/** InsufficientAccountPermissions */
|
||||
KnownStorageErrorCode["InsufficientAccountPermissions"] = "InsufficientAccountPermissions";
|
||||
/** InternalError */
|
||||
KnownStorageErrorCode["InternalError"] = "InternalError";
|
||||
/** InvalidAuthenticationInfo */
|
||||
KnownStorageErrorCode["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo";
|
||||
/** InvalidHeaderValue */
|
||||
KnownStorageErrorCode["InvalidHeaderValue"] = "InvalidHeaderValue";
|
||||
/** InvalidHttpVerb */
|
||||
KnownStorageErrorCode["InvalidHttpVerb"] = "InvalidHttpVerb";
|
||||
/** InvalidInput */
|
||||
KnownStorageErrorCode["InvalidInput"] = "InvalidInput";
|
||||
/** InvalidMd5 */
|
||||
KnownStorageErrorCode["InvalidMd5"] = "InvalidMd5";
|
||||
/** InvalidMetadata */
|
||||
KnownStorageErrorCode["InvalidMetadata"] = "InvalidMetadata";
|
||||
/** InvalidQueryParameterValue */
|
||||
KnownStorageErrorCode["InvalidQueryParameterValue"] = "InvalidQueryParameterValue";
|
||||
/** InvalidRange */
|
||||
KnownStorageErrorCode["InvalidRange"] = "InvalidRange";
|
||||
/** InvalidResourceName */
|
||||
KnownStorageErrorCode["InvalidResourceName"] = "InvalidResourceName";
|
||||
/** InvalidUri */
|
||||
KnownStorageErrorCode["InvalidUri"] = "InvalidUri";
|
||||
/** InvalidXmlDocument */
|
||||
KnownStorageErrorCode["InvalidXmlDocument"] = "InvalidXmlDocument";
|
||||
/** InvalidXmlNodeValue */
|
||||
KnownStorageErrorCode["InvalidXmlNodeValue"] = "InvalidXmlNodeValue";
|
||||
/** Md5Mismatch */
|
||||
KnownStorageErrorCode["Md5Mismatch"] = "Md5Mismatch";
|
||||
/** MetadataTooLarge */
|
||||
KnownStorageErrorCode["MetadataTooLarge"] = "MetadataTooLarge";
|
||||
/** MissingContentLengthHeader */
|
||||
KnownStorageErrorCode["MissingContentLengthHeader"] = "MissingContentLengthHeader";
|
||||
/** MissingRequiredQueryParameter */
|
||||
KnownStorageErrorCode["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter";
|
||||
/** MissingRequiredHeader */
|
||||
KnownStorageErrorCode["MissingRequiredHeader"] = "MissingRequiredHeader";
|
||||
/** MissingRequiredXmlNode */
|
||||
KnownStorageErrorCode["MissingRequiredXmlNode"] = "MissingRequiredXmlNode";
|
||||
/** MultipleConditionHeadersNotSupported */
|
||||
KnownStorageErrorCode["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported";
|
||||
/** OperationTimedOut */
|
||||
KnownStorageErrorCode["OperationTimedOut"] = "OperationTimedOut";
|
||||
/** OutOfRangeInput */
|
||||
KnownStorageErrorCode["OutOfRangeInput"] = "OutOfRangeInput";
|
||||
/** OutOfRangeQueryParameterValue */
|
||||
KnownStorageErrorCode["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue";
|
||||
/** RequestBodyTooLarge */
|
||||
KnownStorageErrorCode["RequestBodyTooLarge"] = "RequestBodyTooLarge";
|
||||
/** ResourceTypeMismatch */
|
||||
KnownStorageErrorCode["ResourceTypeMismatch"] = "ResourceTypeMismatch";
|
||||
/** RequestUrlFailedToParse */
|
||||
KnownStorageErrorCode["RequestUrlFailedToParse"] = "RequestUrlFailedToParse";
|
||||
/** ResourceAlreadyExists */
|
||||
KnownStorageErrorCode["ResourceAlreadyExists"] = "ResourceAlreadyExists";
|
||||
/** ResourceNotFound */
|
||||
KnownStorageErrorCode["ResourceNotFound"] = "ResourceNotFound";
|
||||
/** ServerBusy */
|
||||
KnownStorageErrorCode["ServerBusy"] = "ServerBusy";
|
||||
/** UnsupportedHeader */
|
||||
KnownStorageErrorCode["UnsupportedHeader"] = "UnsupportedHeader";
|
||||
/** UnsupportedXmlNode */
|
||||
KnownStorageErrorCode["UnsupportedXmlNode"] = "UnsupportedXmlNode";
|
||||
/** UnsupportedQueryParameter */
|
||||
KnownStorageErrorCode["UnsupportedQueryParameter"] = "UnsupportedQueryParameter";
|
||||
/** UnsupportedHttpVerb */
|
||||
KnownStorageErrorCode["UnsupportedHttpVerb"] = "UnsupportedHttpVerb";
|
||||
/** AppendPositionConditionNotMet */
|
||||
KnownStorageErrorCode["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet";
|
||||
/** BlobAlreadyExists */
|
||||
KnownStorageErrorCode["BlobAlreadyExists"] = "BlobAlreadyExists";
|
||||
/** BlobImmutableDueToPolicy */
|
||||
KnownStorageErrorCode["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy";
|
||||
/** BlobNotFound */
|
||||
KnownStorageErrorCode["BlobNotFound"] = "BlobNotFound";
|
||||
/** BlobOverwritten */
|
||||
KnownStorageErrorCode["BlobOverwritten"] = "BlobOverwritten";
|
||||
/** BlobTierInadequateForContentLength */
|
||||
KnownStorageErrorCode["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength";
|
||||
/** BlobUsesCustomerSpecifiedEncryption */
|
||||
KnownStorageErrorCode["BlobUsesCustomerSpecifiedEncryption"] = "BlobUsesCustomerSpecifiedEncryption";
|
||||
/** BlockCountExceedsLimit */
|
||||
KnownStorageErrorCode["BlockCountExceedsLimit"] = "BlockCountExceedsLimit";
|
||||
/** BlockListTooLong */
|
||||
KnownStorageErrorCode["BlockListTooLong"] = "BlockListTooLong";
|
||||
/** CannotChangeToLowerTier */
|
||||
KnownStorageErrorCode["CannotChangeToLowerTier"] = "CannotChangeToLowerTier";
|
||||
/** CannotVerifyCopySource */
|
||||
KnownStorageErrorCode["CannotVerifyCopySource"] = "CannotVerifyCopySource";
|
||||
/** ContainerAlreadyExists */
|
||||
KnownStorageErrorCode["ContainerAlreadyExists"] = "ContainerAlreadyExists";
|
||||
/** ContainerBeingDeleted */
|
||||
KnownStorageErrorCode["ContainerBeingDeleted"] = "ContainerBeingDeleted";
|
||||
/** ContainerDisabled */
|
||||
KnownStorageErrorCode["ContainerDisabled"] = "ContainerDisabled";
|
||||
/** ContainerNotFound */
|
||||
KnownStorageErrorCode["ContainerNotFound"] = "ContainerNotFound";
|
||||
/** ContentLengthLargerThanTierLimit */
|
||||
KnownStorageErrorCode["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit";
|
||||
/** CopyAcrossAccountsNotSupported */
|
||||
KnownStorageErrorCode["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported";
|
||||
/** CopyIdMismatch */
|
||||
KnownStorageErrorCode["CopyIdMismatch"] = "CopyIdMismatch";
|
||||
/** FeatureVersionMismatch */
|
||||
KnownStorageErrorCode["FeatureVersionMismatch"] = "FeatureVersionMismatch";
|
||||
/** IncrementalCopyBlobMismatch */
|
||||
KnownStorageErrorCode["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch";
|
||||
/** IncrementalCopyOfEarlierVersionSnapshotNotAllowed */
|
||||
KnownStorageErrorCode["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed";
|
||||
/** IncrementalCopySourceMustBeSnapshot */
|
||||
KnownStorageErrorCode["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot";
|
||||
/** InfiniteLeaseDurationRequired */
|
||||
KnownStorageErrorCode["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired";
|
||||
/** InvalidBlobOrBlock */
|
||||
KnownStorageErrorCode["InvalidBlobOrBlock"] = "InvalidBlobOrBlock";
|
||||
/** InvalidBlobTier */
|
||||
KnownStorageErrorCode["InvalidBlobTier"] = "InvalidBlobTier";
|
||||
/** InvalidBlobType */
|
||||
KnownStorageErrorCode["InvalidBlobType"] = "InvalidBlobType";
|
||||
/** InvalidBlockId */
|
||||
KnownStorageErrorCode["InvalidBlockId"] = "InvalidBlockId";
|
||||
/** InvalidBlockList */
|
||||
KnownStorageErrorCode["InvalidBlockList"] = "InvalidBlockList";
|
||||
/** InvalidOperation */
|
||||
KnownStorageErrorCode["InvalidOperation"] = "InvalidOperation";
|
||||
/** InvalidPageRange */
|
||||
KnownStorageErrorCode["InvalidPageRange"] = "InvalidPageRange";
|
||||
/** InvalidSourceBlobType */
|
||||
KnownStorageErrorCode["InvalidSourceBlobType"] = "InvalidSourceBlobType";
|
||||
/** InvalidSourceBlobUrl */
|
||||
KnownStorageErrorCode["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl";
|
||||
/** InvalidVersionForPageBlobOperation */
|
||||
KnownStorageErrorCode["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation";
|
||||
/** LeaseAlreadyPresent */
|
||||
KnownStorageErrorCode["LeaseAlreadyPresent"] = "LeaseAlreadyPresent";
|
||||
/** LeaseAlreadyBroken */
|
||||
KnownStorageErrorCode["LeaseAlreadyBroken"] = "LeaseAlreadyBroken";
|
||||
/** LeaseIdMismatchWithBlobOperation */
|
||||
KnownStorageErrorCode["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation";
|
||||
/** LeaseIdMismatchWithContainerOperation */
|
||||
KnownStorageErrorCode["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation";
|
||||
/** LeaseIdMismatchWithLeaseOperation */
|
||||
KnownStorageErrorCode["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation";
|
||||
/** LeaseIdMissing */
|
||||
KnownStorageErrorCode["LeaseIdMissing"] = "LeaseIdMissing";
|
||||
/** LeaseIsBreakingAndCannotBeAcquired */
|
||||
KnownStorageErrorCode["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired";
|
||||
/** LeaseIsBreakingAndCannotBeChanged */
|
||||
KnownStorageErrorCode["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged";
|
||||
/** LeaseIsBrokenAndCannotBeRenewed */
|
||||
KnownStorageErrorCode["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed";
|
||||
/** LeaseLost */
|
||||
KnownStorageErrorCode["LeaseLost"] = "LeaseLost";
|
||||
/** LeaseNotPresentWithBlobOperation */
|
||||
KnownStorageErrorCode["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation";
|
||||
/** LeaseNotPresentWithContainerOperation */
|
||||
KnownStorageErrorCode["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation";
|
||||
/** LeaseNotPresentWithLeaseOperation */
|
||||
KnownStorageErrorCode["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation";
|
||||
/** MaxBlobSizeConditionNotMet */
|
||||
KnownStorageErrorCode["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet";
|
||||
/** NoAuthenticationInformation */
|
||||
KnownStorageErrorCode["NoAuthenticationInformation"] = "NoAuthenticationInformation";
|
||||
/** NoPendingCopyOperation */
|
||||
KnownStorageErrorCode["NoPendingCopyOperation"] = "NoPendingCopyOperation";
|
||||
/** OperationNotAllowedOnIncrementalCopyBlob */
|
||||
KnownStorageErrorCode["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob";
|
||||
/** PendingCopyOperation */
|
||||
KnownStorageErrorCode["PendingCopyOperation"] = "PendingCopyOperation";
|
||||
/** PreviousSnapshotCannotBeNewer */
|
||||
KnownStorageErrorCode["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer";
|
||||
/** PreviousSnapshotNotFound */
|
||||
KnownStorageErrorCode["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound";
|
||||
/** PreviousSnapshotOperationNotSupported */
|
||||
KnownStorageErrorCode["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported";
|
||||
/** SequenceNumberConditionNotMet */
|
||||
KnownStorageErrorCode["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet";
|
||||
/** SequenceNumberIncrementTooLarge */
|
||||
KnownStorageErrorCode["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge";
|
||||
/** SnapshotCountExceeded */
|
||||
KnownStorageErrorCode["SnapshotCountExceeded"] = "SnapshotCountExceeded";
|
||||
/** SnapshotOperationRateExceeded */
|
||||
KnownStorageErrorCode["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded";
|
||||
/** SnapshotsPresent */
|
||||
KnownStorageErrorCode["SnapshotsPresent"] = "SnapshotsPresent";
|
||||
/** SourceConditionNotMet */
|
||||
KnownStorageErrorCode["SourceConditionNotMet"] = "SourceConditionNotMet";
|
||||
/** SystemInUse */
|
||||
KnownStorageErrorCode["SystemInUse"] = "SystemInUse";
|
||||
/** TargetConditionNotMet */
|
||||
KnownStorageErrorCode["TargetConditionNotMet"] = "TargetConditionNotMet";
|
||||
/** UnauthorizedBlobOverwrite */
|
||||
KnownStorageErrorCode["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite";
|
||||
/** BlobBeingRehydrated */
|
||||
KnownStorageErrorCode["BlobBeingRehydrated"] = "BlobBeingRehydrated";
|
||||
/** BlobArchived */
|
||||
KnownStorageErrorCode["BlobArchived"] = "BlobArchived";
|
||||
/** BlobNotArchived */
|
||||
KnownStorageErrorCode["BlobNotArchived"] = "BlobNotArchived";
|
||||
/** AuthorizationSourceIPMismatch */
|
||||
KnownStorageErrorCode["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch";
|
||||
/** AuthorizationProtocolMismatch */
|
||||
KnownStorageErrorCode["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch";
|
||||
/** AuthorizationPermissionMismatch */
|
||||
KnownStorageErrorCode["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch";
|
||||
/** AuthorizationServiceMismatch */
|
||||
KnownStorageErrorCode["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch";
|
||||
/** AuthorizationResourceTypeMismatch */
|
||||
KnownStorageErrorCode["AuthorizationResourceTypeMismatch"] = "AuthorizationResourceTypeMismatch";
|
||||
})(KnownStorageErrorCode || (KnownStorageErrorCode = {}));
|
||||
//# sourceMappingURL=index.js.map
|
||||
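The KnownStorageErrorCode values added above follow the generated SDK's extensible-enum pattern: the service reports a plain string, and the Known* constants only name the documented values, so unknown codes still flow through. A small sketch (the error shape and the subset of codes here are illustrative, not the full generated enum):

```typescript
// Illustrative subset of the generated known values.
enum KnownStorageErrorCode {
  BlobNotFound = "BlobNotFound",
  ContainerNotFound = "ContainerNotFound",
}

// A failed request reports its code as a plain string, so future codes are still representable.
function isNotFound(errorCode: string | undefined): boolean {
  return (
    errorCode === KnownStorageErrorCode.BlobNotFound ||
    errorCode === KnownStorageErrorCode.ContainerNotFound
  );
}

console.log(isNotFound("BlobNotFound"));   // true
console.log(isNotFound("SomeFutureCode")); // false, but still a legal code value
```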
File diff suppressed because one or more lines are too long
4734
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js
generated
vendored
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -5,11 +5,11 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
/** Class representing a AppendBlob. */
export class AppendBlob {
/** Class containing AppendBlob operations. */
export class AppendBlobImpl {
/**
* Initialize a new instance of the class AppendBlob class.
* @param client Reference to the service client
@@ -23,11 +23,7 @@ export class AppendBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
create(contentLength, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, createOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Append Block operation commits a new block of data to the end of an existing append blob. The
|
||||
|
|
@@ -38,12 +34,7 @@ export class AppendBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
appendBlock(contentLength, body, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Append Block operation commits a new block of data to the end of an existing append blob where
|
||||
|
|
@@ -55,12 +46,7 @@ export class AppendBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
appendBlockFromUrl(sourceUrl, contentLength, options) {
|
||||
const operationArguments = {
|
||||
sourceUrl,
|
||||
contentLength,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec);
|
||||
return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version
|
||||
|
|
@@ -68,26 +54,22 @@ export class AppendBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
seal(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, sealOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, sealOperationSpec);
|
||||
}
|
||||
}
|
||||
// Operation Specifications
|
||||
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
|
||||
const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
|
||||
const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
|
||||
const createOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.AppendBlobCreateHeaders
|
||||
headersMapper: Mappers.AppendBlobCreateHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.AppendBlobCreateExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.AppendBlobCreateExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@@ -117,22 +99,22 @@ const createOperationSpec = {
|
|||
Parameters.encryptionScope,
|
||||
Parameters.blobTagsString,
|
||||
Parameters.legalHold1,
|
||||
Parameters.blobType1
|
||||
Parameters.blobType1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const appendBlockOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.AppendBlobAppendBlockHeaders
|
||||
headersMapper: Mappers.AppendBlobAppendBlockHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body1,
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],
|
||||
|
|
@@ -156,22 +138,24 @@ const appendBlockOperationSpec = {
|
|||
Parameters.contentType1,
|
||||
Parameters.accept2,
|
||||
Parameters.maxSize,
|
||||
Parameters.appendPosition
|
||||
Parameters.appendPosition,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "binary",
|
||||
serializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const appendBlockFromUrlOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders
|
||||
headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@@ -201,22 +185,22 @@ const appendBlockFromUrlOperationSpec = {
|
|||
Parameters.sourceContentCrc64,
|
||||
Parameters.maxSize,
|
||||
Parameters.appendPosition,
|
||||
Parameters.sourceRange1
|
||||
Parameters.sourceRange1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const sealOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.AppendBlobSealHeaders
|
||||
headersMapper: Mappers.AppendBlobSealHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.AppendBlobSealExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.AppendBlobSealExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@@ -229,9 +213,9 @@ const sealOperationSpec = {
|
|||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.ifMatch,
|
||||
Parameters.ifNoneMatch,
|
||||
Parameters.appendPosition
|
||||
Parameters.appendPosition,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
//# sourceMappingURL=appendBlob.js.map
|
||||
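The repeated change in this generated file is the move from @azure/core-http to @azure/core-client: the methods no longer wrap options with coreHttp.operationOptionsToRequestOptionsBase and instead pass the argument bag straight to sendOperationRequest, and serializers come from coreClient.createSerializer. A self-contained sketch of that calling shape (FakeClient and the spec type are stand-ins defined here, not the real core-client types):

```typescript
// Stand-in types, for illustration only.
interface OperationSpec {
  path: string;
  httpMethod: "PUT" | "GET" | "DELETE";
}

interface FakeClient {
  sendOperationRequest<T>(operationArguments: Record<string, unknown>, spec: OperationSpec): Promise<T>;
}

const createOperationSpec: OperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT" };

class AppendBlobSketch {
  constructor(private readonly client: FakeClient) {}

  // New style: parameters and the caller's options bag are forwarded as-is;
  // the old coreHttp.operationOptionsToRequestOptionsBase(options || {}) wrapper is gone.
  create(contentLength: number, options?: Record<string, unknown>): Promise<void> {
    return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec);
  }
}
```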
File diff suppressed because one or more lines are too long
418
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js
generated
vendored
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -5,11 +5,11 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
/** Class representing a BlockBlob. */
export class BlockBlob {
/** Class containing BlockBlob operations. */
export class BlockBlobImpl {
/**
* Initialize a new instance of the class BlockBlob class.
* @param client Reference to the service client
@ -27,12 +27,7 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
upload(contentLength, body, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, uploadOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read
|
||||
|
|
@ -48,12 +43,7 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
putBlobFromUrl(contentLength, copySource, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
copySource,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Stage Block operation creates a new block to be committed as part of a blob
|
||||
|
|
@ -65,13 +55,7 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
stageBlock(blockId, contentLength, body, options) {
|
||||
const operationArguments = {
|
||||
blockId,
|
||||
contentLength,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec);
|
||||
return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Stage Block operation creates a new block to be committed as part of a blob where the contents
|
||||
|
|
@ -84,13 +68,7 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
stageBlockFromURL(blockId, contentLength, sourceUrl, options) {
|
||||
const operationArguments = {
|
||||
blockId,
|
||||
contentLength,
|
||||
sourceUrl,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec);
|
||||
return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Commit Block List operation writes a blob by specifying the list of block IDs that make up the
|
||||
|
|
@ -104,11 +82,7 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
commitBlockList(blocks, options) {
|
||||
const operationArguments = {
|
||||
blocks,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec);
|
||||
return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block
|
||||
|
|
@ -118,27 +92,22 @@ export class BlockBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getBlockList(listType, options) {
|
||||
const operationArguments = {
|
||||
listType,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec);
|
||||
return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec);
|
||||
}
|
||||
}
|
||||
// Operation Specifications
|
||||
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
|
||||
const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
|
||||
const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
|
||||
const uploadOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.BlockBlobUploadHeaders
|
||||
headersMapper: Mappers.BlockBlobUploadHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobUploadExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobUploadExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body1,
|
||||
queryParameters: [Parameters.timeoutInSeconds],
|
||||
|
|
@ -173,22 +142,24 @@ const uploadOperationSpec = {
|
|||
Parameters.transactionalContentCrc64,
|
||||
Parameters.contentType1,
|
||||
Parameters.accept2,
|
||||
Parameters.blobType2
|
||||
Parameters.blobType2,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "binary",
|
||||
serializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const putBlobFromUrlOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders
|
||||
headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -227,28 +198,28 @@ const putBlobFromUrlOperationSpec = {
|
|||
Parameters.copySourceTags,
|
||||
Parameters.transactionalContentMD5,
|
||||
Parameters.blobType2,
|
||||
Parameters.copySourceBlobProperties
|
||||
Parameters.copySourceBlobProperties,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const stageBlockOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.BlockBlobStageBlockHeaders
|
||||
headersMapper: Mappers.BlockBlobStageBlockHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body1,
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp24,
|
||||
Parameters.blockId
|
||||
Parameters.blockId,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -263,27 +234,29 @@ const stageBlockOperationSpec = {
|
|||
Parameters.transactionalContentMD5,
|
||||
Parameters.transactionalContentCrc64,
|
||||
Parameters.contentType1,
|
||||
Parameters.accept2
|
||||
Parameters.accept2,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "binary",
|
||||
serializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const stageBlockFromURLOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders
|
||||
headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp24,
|
||||
Parameters.blockId
|
||||
Parameters.blockId,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -304,22 +277,22 @@ const stageBlockFromURLOperationSpec = {
|
|||
Parameters.copySourceAuthorization,
|
||||
Parameters.sourceUrl,
|
||||
Parameters.sourceContentCrc64,
|
||||
Parameters.sourceRange1
|
||||
Parameters.sourceRange1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const commitBlockListOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.BlockBlobCommitBlockListHeaders
|
||||
headersMapper: Mappers.BlockBlobCommitBlockListHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.blocks,
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],
|
||||
|
|
@ -352,12 +325,12 @@ const commitBlockListOperationSpec = {
|
|||
Parameters.blobTagsString,
|
||||
Parameters.legalHold1,
|
||||
Parameters.transactionalContentMD5,
|
||||
Parameters.transactionalContentCrc64
|
||||
Parameters.transactionalContentCrc64,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getBlockListOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
|
|
@ -365,18 +338,18 @@ const getBlockListOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.BlockList,
|
||||
headersMapper: Mappers.BlockBlobGetBlockListHeaders
|
||||
headersMapper: Mappers.BlockBlobGetBlockListHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.snapshot,
|
||||
Parameters.comp25,
|
||||
Parameters.listType
|
||||
Parameters.listType,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -384,9 +357,9 @@ const getBlockListOperationSpec = {
|
|||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifTags
|
||||
Parameters.ifTags,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
//# sourceMappingURL=blockBlob.js.map
|
||||
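For context, the BlockBlob operations diffed above back the public BlockBlobClient. A hedged end-to-end sketch against the public @azure/storage-blob API (the connection string, container, and blob names are placeholders):

```typescript
import { BlobServiceClient } from "@azure/storage-blob";

async function uploadInTwoBlocks(connectionString: string): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(connectionString);
  const blockBlob = service
    .getContainerClient("my-container")
    .getBlockBlobClient("my-blob.txt");

  // Block IDs must be base64-encoded strings of equal length.
  const blockIds = ["block-000", "block-001"].map((id) => Buffer.from(id).toString("base64"));

  // Stage Block: upload each chunk under its block ID.
  await blockBlob.stageBlock(blockIds[0], "hello ", "hello ".length);
  await blockBlob.stageBlock(blockIds[1], "world", "world".length);

  // Commit Block List: assemble the staged blocks into the readable blob.
  await blockBlob.commitBlockList(blockIds);
}
```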
File diff suppressed because one or more lines are too long
@@ -5,11 +5,11 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
/** Class representing a Container. */
export class Container {
/** Class containing Container operations. */
export class ContainerImpl {
/**
* Initialize a new instance of the class Container class.
* @param client Reference to the service client
@ -23,10 +23,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
create(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, createOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, createOperationSpec);
|
||||
}
|
||||
/**
|
||||
* returns all user-defined metadata and system properties for the specified container. The data
|
||||
|
|
@ -34,10 +31,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getProperties(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec);
|
||||
}
|
||||
/**
|
||||
* operation marks the specified container for deletion. The container and any blobs contained within
|
||||
|
|
@ -45,20 +39,14 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
delete(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, deleteOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, deleteOperationSpec);
|
||||
}
|
||||
/**
|
||||
* operation sets one or more user-defined name-value pairs for the specified container.
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
setMetadata(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, setMetadataOperationSpec);
|
||||
}
|
||||
/**
|
||||
* gets the permissions for the specified container. The permissions indicate whether container data
|
||||
|
|
@ -66,10 +54,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getAccessPolicy(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec);
|
||||
}
|
||||
/**
|
||||
* sets the permissions for the specified container. The permissions indicate whether blobs in a
|
||||
|
|
@ -77,20 +62,14 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
setAccessPolicy(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Restores a previously-deleted container.
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
restore(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, restoreOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, restoreOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Renames an existing container.
|
||||
|
|
@ -98,11 +77,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
rename(sourceContainerName, options) {
|
||||
const operationArguments = {
|
||||
sourceContainerName,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, renameOperationSpec);
|
||||
return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Batch operation allows multiple API calls to be embedded into a single HTTP request.
|
||||
|
|
@ -113,13 +88,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
submitBatch(contentLength, multipartContentType, body, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
multipartContentType,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Filter Blobs operation enables callers to list blobs in a container whose tags match a given
|
||||
|
|
@ -127,10 +96,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
filterBlobs(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] establishes and manages a lock on a container for delete operations. The lock duration can
|
||||
|
|
@ -138,10 +104,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
acquireLease(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] establishes and manages a lock on a container for delete operations. The lock duration can
|
||||
|
|
@ -150,11 +113,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
releaseLease(leaseId, options) {
|
||||
const operationArguments = {
|
||||
leaseId,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec);
|
||||
return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] establishes and manages a lock on a container for delete operations. The lock duration can
|
||||
|
|
@ -163,11 +122,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
renewLease(leaseId, options) {
|
||||
const operationArguments = {
|
||||
leaseId,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec);
|
||||
return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] establishes and manages a lock on a container for delete operations. The lock duration can
|
||||
|
|
@ -175,10 +130,7 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
breakLease(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] establishes and manages a lock on a container for delete operations. The lock duration can
|
||||
|
|
@ -190,22 +142,14 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
changeLease(leaseId, proposedLeaseId, options) {
|
||||
const operationArguments = {
|
||||
leaseId,
|
||||
proposedLeaseId,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec);
|
||||
return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] The List Blobs operation returns a list of the blobs under the specified container
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
listBlobFlatSegment(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec);
|
||||
}
|
||||
/**
|
||||
* [Update] The List Blobs operation returns a list of the blobs under the specified container
|
||||
|
|
@ -216,36 +160,29 @@ export class Container {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
listBlobHierarchySegment(delimiter, options) {
|
||||
const operationArguments = {
|
||||
delimiter,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec);
|
||||
return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Returns the sku name and account kind
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
getAccountInfo(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec);
|
||||
}
|
||||
}
|
||||
// Operation Specifications
|
||||
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
|
||||
const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
|
||||
const createOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.ContainerCreateHeaders
|
||||
headersMapper: Mappers.ContainerCreateHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerCreateExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerCreateExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -256,22 +193,22 @@ const createOperationSpec = {
|
|||
Parameters.metadata,
|
||||
Parameters.access,
|
||||
Parameters.defaultEncryptionScope,
|
||||
Parameters.preventEncryptionScopeOverride
|
||||
Parameters.preventEncryptionScopeOverride,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getPropertiesOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerGetPropertiesHeaders
|
||||
headersMapper: Mappers.ContainerGetPropertiesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -279,22 +216,22 @@ const getPropertiesOperationSpec = {
|
|||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.leaseId
|
||||
Parameters.leaseId,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const deleteOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "DELETE",
|
||||
responses: {
|
||||
202: {
|
||||
headersMapper: Mappers.ContainerDeleteHeaders
|
||||
headersMapper: Mappers.ContainerDeleteHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerDeleteExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerDeleteExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -304,27 +241,27 @@ const deleteOperationSpec = {
|
|||
Parameters.accept1,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince
|
||||
Parameters.ifUnmodifiedSince,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const setMetadataOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerSetMetadataHeaders
|
||||
headersMapper: Mappers.ContainerSetMetadataHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerSetMetadataExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerSetMetadataExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp6
|
||||
Parameters.comp6,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -333,10 +270,10 @@ const setMetadataOperationSpec = {
|
|||
Parameters.accept1,
|
||||
Parameters.metadata,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifModifiedSince
|
||||
Parameters.ifModifiedSince,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getAccessPolicyOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
|
|
@ -347,53 +284,53 @@ const getAccessPolicyOperationSpec = {
|
|||
type: {
|
||||
name: "Sequence",
|
||||
element: {
|
||||
type: { name: "Composite", className: "SignedIdentifier" }
|
||||
}
|
||||
type: { name: "Composite", className: "SignedIdentifier" },
|
||||
},
|
||||
},
|
||||
serializedName: "SignedIdentifiers",
|
||||
xmlName: "SignedIdentifiers",
|
||||
xmlIsWrapped: true,
|
||||
xmlElementName: "SignedIdentifier"
|
||||
xmlElementName: "SignedIdentifier",
|
||||
},
|
||||
headersMapper: Mappers.ContainerGetAccessPolicyHeaders
|
||||
headersMapper: Mappers.ContainerGetAccessPolicyHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp7
|
||||
Parameters.comp7,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.leaseId
|
||||
Parameters.leaseId,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const setAccessPolicyOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerSetAccessPolicyHeaders
|
||||
headersMapper: Mappers.ContainerSetAccessPolicyHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.containerAcl,
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp7
|
||||
Parameters.comp7,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -404,29 +341,29 @@ const setAccessPolicyOperationSpec = {
|
|||
Parameters.access,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince
|
||||
Parameters.ifUnmodifiedSince,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const restoreOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.ContainerRestoreHeaders
|
||||
headersMapper: Mappers.ContainerRestoreHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerRestoreExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerRestoreExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp8
|
||||
Parameters.comp8,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -434,27 +371,27 @@ const restoreOperationSpec = {
|
|||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.deletedContainerName,
|
||||
Parameters.deletedContainerVersion
|
||||
Parameters.deletedContainerVersion,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const renameOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerRenameHeaders
|
||||
headersMapper: Mappers.ContainerRenameHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerRenameExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerRenameExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp9
|
||||
Parameters.comp9,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -462,10 +399,10 @@ const renameOperationSpec = {
|
|||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.sourceContainerName,
|
||||
Parameters.sourceLeaseId
|
||||
Parameters.sourceLeaseId,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const submitBatchOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
|
|
@ -474,34 +411,33 @@ const submitBatchOperationSpec = {
|
|||
202: {
|
||||
bodyMapper: {
|
||||
type: { name: "Stream" },
|
||||
serializedName: "parsedResponse"
|
||||
serializedName: "parsedResponse",
|
||||
},
|
||||
headersMapper: Mappers.ContainerSubmitBatchHeaders
|
||||
headersMapper: Mappers.ContainerSubmitBatchHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body,
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp4,
|
||||
Parameters.restype2
|
||||
Parameters.restype2,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.contentType,
|
||||
Parameters.accept,
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.contentLength,
|
||||
Parameters.multipartContentType
|
||||
Parameters.multipartContentType,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const filterBlobsOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
|
|
@ -509,12 +445,12 @@ const filterBlobsOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.FilterBlobSegment,
|
||||
headersMapper: Mappers.ContainerFilterBlobsHeaders
|
||||
headersMapper: Mappers.ContainerFilterBlobsHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
|
|
@ -522,33 +458,33 @@ const filterBlobsOperationSpec = {
|
|||
Parameters.maxPageSize,
|
||||
Parameters.comp5,
|
||||
Parameters.where,
|
||||
Parameters.restype2
|
||||
Parameters.restype2,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const acquireLeaseOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.ContainerAcquireLeaseHeaders
|
||||
headersMapper: Mappers.ContainerAcquireLeaseHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp10
|
||||
Parameters.comp10,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -559,27 +495,27 @@ const acquireLeaseOperationSpec = {
|
|||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.action,
|
||||
Parameters.duration,
|
||||
Parameters.proposedLeaseId
|
||||
Parameters.proposedLeaseId,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const releaseLeaseOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerReleaseLeaseHeaders
|
||||
headersMapper: Mappers.ContainerReleaseLeaseHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp10
|
||||
Parameters.comp10,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -589,27 +525,27 @@ const releaseLeaseOperationSpec = {
|
|||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.action1,
|
||||
Parameters.leaseId1
|
||||
Parameters.leaseId1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const renewLeaseOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerRenewLeaseHeaders
|
||||
headersMapper: Mappers.ContainerRenewLeaseHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp10
|
||||
Parameters.comp10,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -619,27 +555,27 @@ const renewLeaseOperationSpec = {
|
|||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.leaseId1,
|
||||
Parameters.action2
|
||||
Parameters.action2,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const breakLeaseOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
202: {
|
||||
headersMapper: Mappers.ContainerBreakLeaseHeaders
|
||||
headersMapper: Mappers.ContainerBreakLeaseHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp10
|
||||
Parameters.comp10,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -649,27 +585,27 @@ const breakLeaseOperationSpec = {
|
|||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.action3,
|
||||
Parameters.breakPeriod
|
||||
Parameters.breakPeriod,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const changeLeaseOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerChangeLeaseHeaders
|
||||
headersMapper: Mappers.ContainerChangeLeaseHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype2,
|
||||
Parameters.comp10
|
||||
Parameters.comp10,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -680,10 +616,10 @@ const changeLeaseOperationSpec = {
|
|||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.leaseId1,
|
||||
Parameters.action4,
|
||||
Parameters.proposedLeaseId1
|
||||
Parameters.proposedLeaseId1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const listBlobFlatSegmentOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
|
|
@ -691,43 +627,12 @@ const listBlobFlatSegmentOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.ListBlobsFlatSegmentResponse,
|
||||
headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders
|
||||
headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders
|
||||
}
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp2,
|
||||
Parameters.prefix,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.restype2,
|
||||
Parameters.include1
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
};
|
||||
const listBlobHierarchySegmentOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,
|
||||
headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders
|
||||
headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders
|
||||
}
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
|
|
@ -737,33 +642,72 @@ const listBlobHierarchySegmentOperationSpec = {
|
|||
Parameters.maxPageSize,
|
||||
Parameters.restype2,
|
||||
Parameters.include1,
|
||||
Parameters.delimiter
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const listBlobHierarchySegmentOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,
|
||||
headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp2,
|
||||
Parameters.prefix,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.restype2,
|
||||
Parameters.include1,
|
||||
Parameters.delimiter,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getAccountInfoOperationSpec = {
|
||||
path: "/{containerName}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ContainerGetAccountInfoHeaders
|
||||
headersMapper: Mappers.ContainerGetAccountInfoHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.comp, Parameters.restype1],
|
||||
queryParameters: [
|
||||
Parameters.comp,
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype1,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [Parameters.version, Parameters.accept1],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
//# sourceMappingURL=container.js.map
|
||||
File diff suppressed because one or more lines are too long
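The regenerated operation groups above drop the intermediate operationArguments object and the coreHttp.operationOptionsToRequestOptionsBase conversion: parameters and options are passed inline to sendOperationRequest, and serializers come from coreClient.createSerializer. A minimal TypeScript sketch of that calling convention, assuming a simplified operation spec (the real Mappers and Parameters objects are AutoRest-generated and much richer):

// Sketch only: a stripped-down Container-style operation group using the
// @azure/core-client convention seen in the regenerated code above.
import * as coreClient from "@azure/core-client";

const xmlSerializer = coreClient.createSerializer({}, /* isXml */ true);

// Hypothetical, minimal stand-in for the generated changeLeaseOperationSpec;
// real specs also carry query/header parameter mappers and error-body mappers.
const changeLeaseOperationSpec: coreClient.OperationSpec = {
  path: "/{containerName}",
  httpMethod: "PUT",
  responses: { 200: {}, default: {} },
  isXML: true,
  serializer: xmlSerializer,
};

export class ContainerSketch extends coreClient.ServiceClient {
  // Arguments are passed inline together with options; there is no separate
  // operationOptionsToRequestOptionsBase step as in the core-http generation.
  changeLease(leaseId: string, proposedLeaseId: string, options?: coreClient.OperationOptions) {
    return this.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec);
  }
}

The generated changeLease hunk above has the same shape, with the real spec and parameter mappers filled in.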
@ -5,11 +5,11 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
/** Class representing a PageBlob. */
export class PageBlob {
/** Class containing PageBlob operations. */
export class PageBlobImpl {
/**
* Initialize a new instance of the class PageBlob class.
* @param client Reference to the service client
@ -25,12 +25,7 @@ export class PageBlob {
* @param options The options parameters.
*/
create(contentLength, blobContentLength, options) {
const operationArguments = {
contentLength,
blobContentLength,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(operationArguments, createOperationSpec);
return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec);
}
||||
/**
|
||||
* The Upload Pages operation writes a range of pages to a page blob
|
||||
|
|
@ -39,12 +34,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
uploadPages(contentLength, body, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Clear Pages operation clears a set of pages from a page blob
|
||||
|
|
@ -52,11 +42,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
clearPages(contentLength, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Upload Pages operation writes a range of pages to a page blob where the contents are read from a
|
||||
|
|
@ -70,14 +56,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) {
|
||||
const operationArguments = {
|
||||
sourceUrl,
|
||||
sourceRange,
|
||||
contentLength,
|
||||
range,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec);
|
||||
return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a
|
||||
|
|
@ -85,10 +64,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getPageRanges(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were
|
||||
|
|
@ -96,10 +72,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getPageRangesDiff(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Resize the Blob
|
||||
|
|
@ -108,11 +81,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
resize(blobContentLength, options) {
|
||||
const operationArguments = {
|
||||
blobContentLength,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, resizeOperationSpec);
|
||||
return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Update the sequence number of the blob
|
||||
|
|
@ -122,11 +91,7 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
updateSequenceNumber(sequenceNumberAction, options) {
|
||||
const operationArguments = {
|
||||
sequenceNumberAction,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec);
|
||||
return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.
|
||||
|
|
@ -141,27 +106,22 @@ export class PageBlob {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
copyIncremental(copySource, options) {
|
||||
const operationArguments = {
|
||||
copySource,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec);
|
||||
return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec);
|
||||
}
|
||||
}
|
||||
// Operation Specifications
|
||||
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
|
||||
const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
|
||||
const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
|
||||
const createOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.PageBlobCreateHeaders
|
||||
headersMapper: Mappers.PageBlobCreateHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobCreateExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobCreateExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -194,22 +154,22 @@ const createOperationSpec = {
|
|||
Parameters.legalHold1,
|
||||
Parameters.blobType,
|
||||
Parameters.blobContentLength,
|
||||
Parameters.blobSequenceNumber
|
||||
Parameters.blobSequenceNumber,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const uploadPagesOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.PageBlobUploadPagesHeaders
|
||||
headersMapper: Mappers.PageBlobUploadPagesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body1,
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
|
||||
|
|
@ -236,22 +196,24 @@ const uploadPagesOperationSpec = {
|
|||
Parameters.pageWrite,
|
||||
Parameters.ifSequenceNumberLessThanOrEqualTo,
|
||||
Parameters.ifSequenceNumberLessThan,
|
||||
Parameters.ifSequenceNumberEqualTo
|
||||
Parameters.ifSequenceNumberEqualTo,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "binary",
|
||||
serializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const clearPagesOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.PageBlobClearPagesHeaders
|
||||
headersMapper: Mappers.PageBlobClearPagesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobClearPagesExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobClearPagesExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -274,22 +236,22 @@ const clearPagesOperationSpec = {
|
|||
Parameters.ifSequenceNumberLessThanOrEqualTo,
|
||||
Parameters.ifSequenceNumberLessThan,
|
||||
Parameters.ifSequenceNumberEqualTo,
|
||||
Parameters.pageWrite1
|
||||
Parameters.pageWrite1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const uploadPagesFromURLOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
201: {
|
||||
headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders
|
||||
headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -321,10 +283,10 @@ const uploadPagesFromURLOperationSpec = {
|
|||
Parameters.sourceUrl,
|
||||
Parameters.sourceRange,
|
||||
Parameters.sourceContentCrc64,
|
||||
Parameters.range1
|
||||
Parameters.range1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getPageRangesOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
|
|
@ -332,48 +294,12 @@ const getPageRangesOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.PageList,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesHeaders
|
||||
headersMapper: Mappers.PageBlobGetPageRangesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders
|
||||
}
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.snapshot,
|
||||
Parameters.comp20
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.range,
|
||||
Parameters.ifMatch,
|
||||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
};
|
||||
const getPageRangesDiffOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.PageList,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders
|
||||
headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders
|
||||
}
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
|
|
@ -381,7 +307,6 @@ const getPageRangesDiffOperationSpec = {
|
|||
Parameters.maxPageSize,
|
||||
Parameters.snapshot,
|
||||
Parameters.comp20,
|
||||
Parameters.prevsnapshot
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
|
|
@ -395,22 +320,59 @@ const getPageRangesDiffOperationSpec = {
|
|||
Parameters.ifMatch,
|
||||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags,
|
||||
Parameters.prevSnapshotUrl
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getPageRangesDiffOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.PageList,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.snapshot,
|
||||
Parameters.comp20,
|
||||
Parameters.prevsnapshot,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
Parameters.leaseId,
|
||||
Parameters.ifModifiedSince,
|
||||
Parameters.ifUnmodifiedSince,
|
||||
Parameters.range,
|
||||
Parameters.ifMatch,
|
||||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags,
|
||||
Parameters.prevSnapshotUrl,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const resizeOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.PageBlobResizeHeaders
|
||||
headersMapper: Mappers.PageBlobResizeHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobResizeExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobResizeExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -428,22 +390,22 @@ const resizeOperationSpec = {
|
|||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags,
|
||||
Parameters.encryptionScope,
|
||||
Parameters.blobContentLength
|
||||
Parameters.blobContentLength,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const updateSequenceNumberOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders
|
||||
headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -458,22 +420,22 @@ const updateSequenceNumberOperationSpec = {
|
|||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags,
|
||||
Parameters.blobSequenceNumber,
|
||||
Parameters.sequenceNumberAction
|
||||
Parameters.sequenceNumberAction,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const copyIncrementalOperationSpec = {
|
||||
path: "/{containerName}/{blob}",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
202: {
|
||||
headersMapper: Mappers.PageBlobCopyIncrementalHeaders
|
||||
headersMapper: Mappers.PageBlobCopyIncrementalHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],
|
||||
urlParameters: [Parameters.url],
|
||||
|
|
@ -486,9 +448,9 @@ const copyIncrementalOperationSpec = {
|
|||
Parameters.ifMatch,
|
||||
Parameters.ifNoneMatch,
|
||||
Parameters.ifTags,
|
||||
Parameters.copySource
|
||||
Parameters.copySource,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
//# sourceMappingURL=pageBlob.js.map
|
||||
File diff suppressed because one or more lines are too long
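The pageBlob hunks above also show the serializer change that repeats in every regenerated operations file: the old generation built two coreHttp.Serializer instances (XML and non-XML), while the new generation calls coreClient.createSerializer once and the binary upload specs reuse the shared XML serializer with mediaType "binary". A small before/after sketch, with a placeholder mappers object standing in for the generated Mappers module:

// Sketch of the serializer construction change; `mappers` is a placeholder.
import * as coreClient from "@azure/core-client";
import * as coreHttp from "@azure/core-http";

const mappers = {};

// Before (core-http based generation): separate XML and non-XML serializers.
export const legacyXmlSerializer = new coreHttp.Serializer(mappers, /* isXml */ true);
export const legacySerializer = new coreHttp.Serializer(mappers, /* isXml */ false);

// After (core-client based generation): one factory call shared by all specs.
export const xmlSerializer = coreClient.createSerializer(mappers, /* isXml */ true);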
@ -5,11 +5,11 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
/** Class representing a Service. */
export class Service {
/** Class containing Service operations. */
export class ServiceImpl {
/**
* Initialize a new instance of the class Service class.
* @param client Reference to the service client
@ -24,11 +24,7 @@ export class Service {
* @param options The options parameters.
*/
setProperties(blobServiceProperties, options) {
const operationArguments = {
blobServiceProperties,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec);
return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec);
}
||||
/**
|
||||
* gets the properties of a storage account's Blob service, including properties for Storage Analytics
|
||||
|
|
@ -36,10 +32,7 @@ export class Service {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getProperties(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Retrieves statistics related to replication for the Blob service. It is only available on the
|
||||
|
|
@ -48,20 +41,14 @@ export class Service {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getStatistics(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The List Containers Segment operation returns a list of the containers under the specified account
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
listContainersSegment(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Retrieves a user delegation key for the Blob service. This is only a valid operation when using
|
||||
|
|
@ -70,21 +57,14 @@ export class Service {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
getUserDelegationKey(keyInfo, options) {
|
||||
const operationArguments = {
|
||||
keyInfo,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec);
|
||||
return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec);
|
||||
}
|
||||
/**
|
||||
* Returns the sku name and account kind
|
||||
* @param options The options parameters.
|
||||
*/
|
||||
getAccountInfo(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Batch operation allows multiple API calls to be embedded into a single HTTP request.
|
||||
|
|
@ -95,13 +75,7 @@ export class Service {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
submitBatch(contentLength, multipartContentType, body, options) {
|
||||
const operationArguments = {
|
||||
contentLength,
|
||||
multipartContentType,
|
||||
body,
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec);
|
||||
return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec);
|
||||
}
|
||||
/**
|
||||
* The Filter Blobs operation enables callers to list blobs across all containers whose tags match a
|
||||
|
|
@ -110,43 +84,40 @@ export class Service {
|
|||
* @param options The options parameters.
|
||||
*/
|
||||
filterBlobs(options) {
|
||||
const operationArguments = {
|
||||
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
|
||||
};
|
||||
return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec);
|
||||
return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec);
|
||||
}
|
||||
}
|
||||
// Operation Specifications
|
||||
const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);
|
||||
const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true);
|
||||
const setPropertiesOperationSpec = {
|
||||
path: "/",
|
||||
httpMethod: "PUT",
|
||||
responses: {
|
||||
202: {
|
||||
headersMapper: Mappers.ServiceSetPropertiesHeaders
|
||||
headersMapper: Mappers.ServiceSetPropertiesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.blobServiceProperties,
|
||||
queryParameters: [
|
||||
Parameters.restype,
|
||||
Parameters.comp,
|
||||
Parameters.timeoutInSeconds
|
||||
Parameters.timeoutInSeconds,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.contentType,
|
||||
Parameters.accept,
|
||||
Parameters.version,
|
||||
Parameters.requestId
|
||||
Parameters.requestId,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getPropertiesOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -154,26 +125,26 @@ const getPropertiesOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.BlobServiceProperties,
|
||||
headersMapper: Mappers.ServiceGetPropertiesHeaders
|
||||
headersMapper: Mappers.ServiceGetPropertiesHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.restype,
|
||||
Parameters.comp,
|
||||
Parameters.timeoutInSeconds
|
||||
Parameters.timeoutInSeconds,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getStatisticsOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -181,26 +152,26 @@ const getStatisticsOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.BlobServiceStatistics,
|
||||
headersMapper: Mappers.ServiceGetStatisticsHeaders
|
||||
headersMapper: Mappers.ServiceGetStatisticsHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.restype,
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp1
|
||||
Parameters.comp1,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const listContainersSegmentOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -208,12 +179,12 @@ const listContainersSegmentOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.ListContainersSegmentResponse,
|
||||
headersMapper: Mappers.ServiceListContainersSegmentHeaders
|
||||
headersMapper: Mappers.ServiceListContainersSegmentHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
|
|
@ -221,16 +192,16 @@ const listContainersSegmentOperationSpec = {
|
|||
Parameters.prefix,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.include
|
||||
Parameters.include,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getUserDelegationKeyOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -238,48 +209,56 @@ const getUserDelegationKeyOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.UserDelegationKey,
|
||||
headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders
|
||||
headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.keyInfo,
|
||||
queryParameters: [
|
||||
Parameters.restype,
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.comp3
|
||||
Parameters.comp3,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.contentType,
|
||||
Parameters.accept,
|
||||
Parameters.version,
|
||||
Parameters.requestId
|
||||
Parameters.requestId,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const getAccountInfoOperationSpec = {
|
||||
path: "/",
|
||||
httpMethod: "GET",
|
||||
responses: {
|
||||
200: {
|
||||
headersMapper: Mappers.ServiceGetAccountInfoHeaders
|
||||
headersMapper: Mappers.ServiceGetAccountInfoHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [Parameters.comp, Parameters.restype1],
|
||||
queryParameters: [
|
||||
Parameters.comp,
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.restype1,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [Parameters.version, Parameters.accept1],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const submitBatchOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -288,30 +267,29 @@ const submitBatchOperationSpec = {
|
|||
202: {
|
||||
bodyMapper: {
|
||||
type: { name: "Stream" },
|
||||
serializedName: "parsedResponse"
|
||||
serializedName: "parsedResponse",
|
||||
},
|
||||
headersMapper: Mappers.ServiceSubmitBatchHeaders
|
||||
headersMapper: Mappers.ServiceSubmitBatchHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders,
|
||||
},
|
||||
},
|
||||
requestBody: Parameters.body,
|
||||
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.contentType,
|
||||
Parameters.accept,
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.contentLength,
|
||||
Parameters.multipartContentType
|
||||
Parameters.multipartContentType,
|
||||
],
|
||||
isXML: true,
|
||||
contentType: "application/xml; charset=utf-8",
|
||||
mediaType: "xml",
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
const filterBlobsOperationSpec = {
|
||||
path: "/",
|
||||
|
|
@ -319,27 +297,27 @@ const filterBlobsOperationSpec = {
|
|||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.FilterBlobSegment,
|
||||
headersMapper: Mappers.ServiceFilterBlobsHeaders
|
||||
headersMapper: Mappers.ServiceFilterBlobsHeaders,
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.StorageError,
|
||||
headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders
|
||||
}
|
||||
headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders,
|
||||
},
|
||||
},
|
||||
queryParameters: [
|
||||
Parameters.timeoutInSeconds,
|
||||
Parameters.marker,
|
||||
Parameters.maxPageSize,
|
||||
Parameters.comp5,
|
||||
Parameters.where
|
||||
Parameters.where,
|
||||
],
|
||||
urlParameters: [Parameters.url],
|
||||
headerParameters: [
|
||||
Parameters.version,
|
||||
Parameters.requestId,
|
||||
Parameters.accept1
|
||||
Parameters.accept1,
|
||||
],
|
||||
isXML: true,
|
||||
serializer: xmlSerializer
|
||||
serializer: xmlSerializer,
|
||||
};
|
||||
//# sourceMappingURL=service.js.map
|
||||
File diff suppressed because one or more lines are too long
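The remaining hunks add the new operationsInterfaces modules: the emitted .js files are empty export {} stubs, and the accompanying source maps carry the TypeScript interfaces (for example Service and Container) that the *Impl classes above implement. A short sketch of that interface/implementation split, using illustrative stand-in names rather than the generated ones:

// Sketch: each operation group pairs a type-only interface with an *Impl class.
// ServiceLike and ServiceImplSketch are stand-ins for the generated
// Service/ServiceImpl pair in this diff.
import type { OperationOptions } from "@azure/core-client";

interface ServiceLike {
  // Mirrors the generated "Returns the sku name and account kind" operation.
  getAccountInfo(options?: OperationOptions): Promise<unknown>;
}

class ServiceImplSketch implements ServiceLike {
  async getAccountInfo(options?: OperationOptions): Promise<unknown> {
    // The generated implementation forwards to this.client.sendOperationRequest(...).
    return { options };
  }
}

// Callers can type against the interface without importing the concrete class.
export const service: ServiceLike = new ServiceImplSketch();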
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/appendBlob.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=appendBlob.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/appendBlob.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"appendBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operationsInterfaces/appendBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreRestPipeline from \"@azure/core-rest-pipeline\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n AppendBlobSealResponse,\n} from \"../models\";\n\n/** Interface representing a AppendBlob. */\nexport interface AppendBlob {\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams,\n ): Promise<AppendBlobCreateResponse>;\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreRestPipeline.RequestBodyType,\n options?: AppendBlobAppendBlockOptionalParams,\n ): Promise<AppendBlobAppendBlockResponse>;\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams,\n ): Promise<AppendBlobAppendBlockFromUrlResponse>;\n /**\n * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(options?: AppendBlobSealOptionalParams): Promise<AppendBlobSealResponse>;\n}\n"]}
|
||||
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blob.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=blob.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blob.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blockBlob.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=blockBlob.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blockBlob.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/container.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=container.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/container.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/index.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export * from "./service";
export * from "./container";
export * from "./blob";
export * from "./pageBlob";
export * from "./appendBlob";
export * from "./blockBlob";
//# sourceMappingURL=index.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operationsInterfaces/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,QAAQ,CAAC;AACvB,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./service\";\nexport * from \"./container\";\nexport * from \"./blob\";\nexport * from \"./pageBlob\";\nexport * from \"./appendBlob\";\nexport * from \"./blockBlob\";\n"]}
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/pageBlob.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=pageBlob.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/pageBlob.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
9
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/service.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
export {};
//# sourceMappingURL=service.js.map
1
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/service.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"service.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operationsInterfaces/service.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreRestPipeline from \"@azure/core-rest-pipeline\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoOptionalParams,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse,\n} from \"../models\";\n\n/** Interface representing a Service. */\nexport interface Service {\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams,\n ): Promise<ServiceSetPropertiesResponse>;\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams,\n ): Promise<ServiceGetPropertiesResponse>;\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams,\n ): Promise<ServiceGetStatisticsResponse>;\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams,\n ): Promise<ServiceListContainersSegmentResponse>;\n /**\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams,\n ): Promise<ServiceGetUserDelegationKeyResponse>;\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: ServiceGetAccountInfoOptionalParams,\n ): Promise<ServiceGetAccountInfoResponse>;\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. 
Example header value: multipart/mixed; boundary=batch_<GUID>\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreRestPipeline.RequestBodyType,\n options?: ServiceSubmitBatchOptionalParams,\n ): Promise<ServiceSubmitBatchResponse>;\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams,\n ): Promise<ServiceFilterBlobsResponse>;\n}\n"]}
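The map above embeds the generated Service interface (setProperties, getStatistics, getUserDelegationKey, submitBatch, filterBlobs, and so on). A small sketch, assuming the operation group comes from the generated StorageClient (client.service) and that the Get Account Info response exposes the skuName and accountKind headers as properties, as the generated models suggest:

import type { Service } from "./operationsInterfaces";

// Log the account kind and SKU reported by the Get Account Info operation.
async function logAccountInfo(service: Service): Promise<void> {
  const info = await service.getAccountInfo();
  console.log(`account kind: ${info.accountKind}, sku: ${info.skuName}`);
}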
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js
generated
vendored
@@ -5,9 +5,9 @@
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import { Service, Container, Blob, PageBlob, AppendBlob, BlockBlob } from "./operations";
import { StorageClientContext } from "./storageClientContext";
export class StorageClient extends StorageClientContext {
import * as coreHttpCompat from "@azure/core-http-compat";
import { ServiceImpl, ContainerImpl, BlobImpl, PageBlobImpl, AppendBlobImpl, BlockBlobImpl, } from "./operations";
export class StorageClient extends coreHttpCompat.ExtendedServiceClient {
/**
* Initializes a new instance of the StorageClient class.
* @param url The URL of the service account, container, or blob that is the target of the desired
@@ -15,13 +15,35 @@ export class StorageClient extends StorageClientContext {
* @param options The parameter options
*/
constructor(url, options) {
super(url, options);
this.service = new Service(this);
this.container = new Container(this);
this.blob = new Blob(this);
this.pageBlob = new PageBlob(this);
this.appendBlob = new AppendBlob(this);
this.blockBlob = new BlockBlob(this);
var _a, _b;
if (url === undefined) {
throw new Error("'url' cannot be null");
}
// Initializing default values for options
if (!options) {
options = {};
}
const defaults = {
requestContentType: "application/json; charset=utf-8",
};
const packageDetails = `azsdk-js-azure-storage-blob/12.25.0`;
const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix
? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`
: `${packageDetails}`;
const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: {
userAgentPrefix,
}, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" });
super(optionsWithDefaults);
// Parameter assignments
this.url = url;
// Assigning values to Constant parameters
this.version = options.version || "2024-11-04";
this.service = new ServiceImpl(this);
this.container = new ContainerImpl(this);
this.blob = new BlobImpl(this);
this.pageBlob = new PageBlobImpl(this);
this.appendBlob = new AppendBlobImpl(this);
this.blockBlob = new BlockBlobImpl(this);
}
}
//# sourceMappingURL=storageClient.js.map
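The rewritten constructor above folds the old StorageClientContext behaviour into the core-http-compat client: defaults are applied first, caller options win, the SDK user-agent string is appended to any caller prefix, and the endpoint falls back from endpoint to the legacy baseUri to the "{url}" template. A standalone sketch of that merge, using a pared-down options shape (the real StorageClientOptionalParams has more fields):

interface SketchOptions {
  endpoint?: string;
  baseUri?: string;
  requestContentType?: string;
  userAgentOptions?: { userAgentPrefix?: string };
}

// Mirrors the merge performed in the constructor above.
function mergeClientOptions(options: SketchOptions = {}): SketchOptions {
  const packageDetails = "azsdk-js-azure-storage-blob/12.25.0";
  const userAgentPrefix = options.userAgentOptions?.userAgentPrefix
    ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`
    : packageDetails;
  return {
    requestContentType: "application/json; charset=utf-8", // default, overridable by caller options
    ...options,
    userAgentOptions: { userAgentPrefix },
    endpoint: options.endpoint ?? options.baseUri ?? "{url}",
  };
}

// mergeClientOptions({ baseUri: "https://example.blob.core.windows.net" }).endpoint
// resolves to the baseUri, matching the _a/_b fallback chain in the compiled code above.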
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,OAAO,EACP,SAAS,EACT,IAAI,EACJ,QAAQ,EACR,UAAU,EACV,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAG9D,MAAM,OAAO,aAAc,SAAQ,oBAAoB;IACrD;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACpB,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3B,IAAI,CAAC,QAAQ,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;IACvC,CAAC;CAQF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n Service,\n Container,\n Blob,\n PageBlob,\n AppendBlob,\n BlockBlob\n} from \"./operations\";\nimport { StorageClientContext } from \"./storageClientContext\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nexport class StorageClient extends StorageClientContext {\n /**\n * Initializes a new instance of the StorageClient class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n super(url, options);\n this.service = new Service(this);\n this.container = new Container(this);\n this.blob = new Blob(this);\n this.pageBlob = new PageBlob(this);\n this.appendBlob = new AppendBlob(this);\n this.blockBlob = new BlockBlob(this);\n }\n\n service: Service;\n container: Container;\n blob: Blob;\n pageBlob: PageBlob;\n appendBlob: AppendBlob;\n blockBlob: BlockBlob;\n}\n"]}
{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,cAAc,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EACL,WAAW,EACX,aAAa,EACb,QAAQ,EACR,YAAY,EACZ,cAAc,EACd,aAAa,GACd,MAAM,cAAc,CAAC;AAWtB,MAAM,OAAO,aAAc,SAAQ,cAAc,CAAC,qBAAqB;IAIrE;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE,CAAC;YACtB,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO,GAAG,EAAE,CAAC;QACf,CAAC;QACD,MAAM,QAAQ,GAAgC;YAC5C,kBAAkB,EAAE,iCAAiC;SACtD,CAAC;QAEF,MAAM,cAAc,GAAG,qCAAqC,CAAC;QAC7D,MAAM,eAAe,GACnB,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,CAAC,eAAe;YAClE,CAAC,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC,eAAe,IAAI,cAAc,EAAE;YACjE,CAAC,CAAC,GAAG,cAAc,EAAE,CAAC;QAE1B,MAAM,mBAAmB,iDACpB,QAAQ,GACR,OAAO,KACV,gBAAgB,EAAE;gBAChB,eAAe;aAChB,EACD,QAAQ,EAAE,MAAA,MAAA,OAAO,CAAC,QAAQ,mCAAI,OAAO,CAAC,OAAO,mCAAI,OAAO,GACzD,CAAC;QACF,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAC3B,wBAAwB;QACxB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QAEf,0CAA0C;QAC1C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;QAC/C,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,SAAS,GAAG,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,IAAI,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,UAAU,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC;QAC3C,IAAI,CAAC,SAAS,GAAG,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC;IAC3C,CAAC;CAQF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttpCompat from \"@azure/core-http-compat\";\nimport {\n ServiceImpl,\n ContainerImpl,\n BlobImpl,\n PageBlobImpl,\n AppendBlobImpl,\n BlockBlobImpl,\n} from \"./operations\";\nimport {\n Service,\n Container,\n Blob,\n PageBlob,\n AppendBlob,\n BlockBlob,\n} from \"./operationsInterfaces\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nexport class StorageClient extends coreHttpCompat.ExtendedServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClient class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n const defaults: StorageClientOptionalParams = {\n requestContentType: \"application/json; charset=utf-8\",\n };\n\n const packageDetails = `azsdk-js-azure-storage-blob/12.25.0`;\n const userAgentPrefix =\n options.userAgentOptions && options.userAgentOptions.userAgentPrefix\n ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`\n : `${packageDetails}`;\n\n const optionsWithDefaults = {\n ...defaults,\n ...options,\n userAgentOptions: {\n userAgentPrefix,\n },\n endpoint: options.endpoint ?? options.baseUri ?? 
\"{url}\",\n };\n super(optionsWithDefaults);\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2024-11-04\";\n this.service = new ServiceImpl(this);\n this.container = new ContainerImpl(this);\n this.blob = new BlobImpl(this);\n this.pageBlob = new PageBlobImpl(this);\n this.appendBlob = new AppendBlobImpl(this);\n this.blockBlob = new BlockBlobImpl(this);\n }\n\n service: Service;\n container: Container;\n blob: Blob;\n pageBlob: PageBlob;\n appendBlob: AppendBlob;\n blockBlob: BlockBlob;\n}\n"]}
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js
generated
vendored
@@ -1,39 +0,0 @@
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
const packageName = "azure-storage-blob";
const packageVersion = "12.14.0";
export class StorageClientContext extends coreHttp.ServiceClient {
/**
* Initializes a new instance of the StorageClientContext class.
* @param url The URL of the service account, container, or blob that is the target of the desired
* operation.
* @param options The parameter options
*/
constructor(url, options) {
if (url === undefined) {
throw new Error("'url' cannot be null");
}
// Initializing default values for options
if (!options) {
options = {};
}
if (!options.userAgent) {
const defaultUserAgent = coreHttp.getDefaultUserAgentValue();
options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;
}
super(undefined, options);
this.requestContentType = "application/json; charset=utf-8";
this.baseUri = options.endpoint || "{url}";
// Parameter assignments
this.url = url;
// Assigning values to Constant parameters
this.version = options.version || "2022-11-02";
}
}
//# sourceMappingURL=storageClientContext.js.map
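For contrast with the replacement constructor earlier in this diff, the deleted context built a complete user-agent string through @azure/core-http rather than supplying a prefix. A sketch of that legacy behaviour, only meaningful while @azure/core-http was still a dependency of this package:

import * as coreHttp from "@azure/core-http";

// Default user agent produced by the removed StorageClientContext:
// "<packageName>/<packageVersion> <core-http default user agent>".
function legacyDefaultUserAgent(): string {
  return `azure-storage-blob/12.14.0 ${coreHttp.getDefaultUserAgentValue()}`;
}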
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map
generated
vendored
@@ -1 +0,0 @@
{"version":3,"file":"storageClientContext.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClientContext.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAG7C,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEjC,MAAM,OAAO,oBAAqB,SAAQ,QAAQ,CAAC,aAAa;IAI9D;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;SACzC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,GAAG,WAAW,IAAI,cAAc,IAAI,gBAAgB,EAAE,CAAC;SAC5E;QAED,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAE1B,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;QAE3C,wBAAwB;QACxB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QAEf,0CAA0C;QAC1C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.14.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2022-11-02\";\n }\n}\n"]}
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js
generated
vendored
@@ -1,5 +1,5 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Licensed under the MIT License.
/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */
export var KnownEncryptionAlgorithmType;
(function (KnownEncryptionAlgorithmType) {
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map
generated
vendored
File diff suppressed because one or more lines are too long
7
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js
generated
vendored
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { RestError } from "@azure/core-http";
// Licensed under the MIT License.
import { RestError } from "@azure/core-rest-pipeline";
export * from "./BlobServiceClient";
export * from "./Clients";
export * from "./ContainerClient";
@@ -12,7 +12,8 @@ export * from "./StorageBrowserPolicyFactory";
export * from "./credentials/AnonymousCredential";
export * from "./credentials/Credential";
export { BlockBlobTier, PremiumPageBlobTier, } from "./models";
export * from "./Pipeline";
export { Pipeline, isPipelineLike, newPipeline, StorageOAuthScopes, } from "./Pipeline";
export { BaseRequestPolicy } from "./policies/RequestPolicy";
export * from "./policies/AnonymousCredentialPolicy";
export * from "./policies/CredentialPolicy";
export * from "./StorageRetryPolicyFactory";
2
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AAGzC,OAAO,EACL,aAAa,EAEb,mBAAmB,GAOpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAE5C,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n BlobImmutabilityPolicy,\n PremiumPageBlobTier,\n Tags,\n TagConditions,\n ContainerRequestConditions,\n HttpAuthorization,\n ModificationConditions,\n MatchConditions,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\n"]}
{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AAEtD,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AAGzC,OAAO,EACL,aAAa,EAEb,mBAAmB,GAOpB,MAAM,UAAU,CAAC;AAClB,OAAO,EACL,QAAQ,EAGR,cAAc,EACd,WAAW,EAUX,kBAAkB,GAEnB,MAAM,YAAY,CAAC;AACpB,OAAO,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAC7D,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAE5C,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { RestError } from \"@azure/core-rest-pipeline\";\n\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n BlobImmutabilityPolicy,\n PremiumPageBlobTier,\n Tags,\n TagConditions,\n ContainerRequestConditions,\n HttpAuthorization,\n ModificationConditions,\n MatchConditions,\n} from \"./models\";\nexport {\n Pipeline,\n PipelineLike,\n PipelineOptions,\n isPipelineLike,\n newPipeline,\n StoragePipelineOptions,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n HttpOperationResponse,\n HttpHeaders,\n HttpRequestBody,\n IHttpClient,\n StorageOAuthScopes,\n ServiceClientOptions,\n} from \"./Pipeline\";\nexport { BaseRequestPolicy } from \"./policies/RequestPolicy\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\n"]}
13
node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js
generated
vendored
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { RestError } from "@azure/core-http";
// Licensed under the MIT License.
import { RestError } from "@azure/core-rest-pipeline";
export * from "./BlobServiceClient";
export * from "./Clients";
export * from "./ContainerClient";
@@ -8,19 +8,20 @@ export * from "./BlobLeaseClient";
export * from "./sas/AccountSASPermissions";
export * from "./sas/AccountSASResourceTypes";
export * from "./sas/AccountSASServices";
export * from "./sas/AccountSASSignatureValues";
export { generateAccountSASQueryParameters, } from "./sas/AccountSASSignatureValues";
export * from "./BlobBatch";
export * from "./BlobBatchClient";
export * from "./BatchResponse";
export * from "./sas/BlobSASPermissions";
export * from "./sas/BlobSASSignatureValues";
export { generateBlobSASQueryParameters, } from "./sas/BlobSASSignatureValues";
export * from "./StorageBrowserPolicyFactory";
export * from "./sas/ContainerSASPermissions";
export * from "./credentials/AnonymousCredential";
export * from "./credentials/Credential";
export * from "./credentials/StorageSharedKeyCredential";
export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, } from "./models";
export * from "./Pipeline";
export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, getBlobServiceAccountAudience, } from "./models";
export { Pipeline, isPipelineLike, newPipeline, StorageOAuthScopes, } from "./Pipeline";
export { BaseRequestPolicy } from "./policies/RequestPolicy";
export * from "./policies/AnonymousCredentialPolicy";
export * from "./policies/CredentialPolicy";
export * from "./StorageRetryPolicyFactory";
Some files were not shown because too many files have changed in this diff.