diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 2163ca29..4d7fdf1c 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -30,10 +30,10 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2 + uses: github/codeql-action/init@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4 with: config-file: .github/codeql/codeql-configuration.yml languages: ${{ matrix.language }} - name: Perform CodeQL analysis - uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2 + uses: github/codeql-action/analyze@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4 diff --git a/action.yml b/action.yml index 109a049e..95958c66 100644 --- a/action.yml +++ b/action.yml @@ -36,7 +36,7 @@ inputs: cache-prefix: description: The prefix of the cache keys. required: false - default: v3 + default: v1 github-token: description: DO NOT SET THIS. required: false diff --git a/dist/index.cjs b/dist/index.cjs index 4f96b5f6..ad468307 100644 --- a/dist/index.cjs +++ b/dist/index.cjs @@ -941,7 +941,7 @@ var require_util = __commonJS({ var assert = require("assert"); var { kDestroyed, kBodyUsed } = require_symbols(); var { IncomingMessage } = require("http"); - var stream2 = require("stream"); + var stream = require("stream"); var net = require("net"); var { InvalidArgumentError } = require_errors(); var { Blob: Blob2 } = require("buffer"); @@ -1060,29 +1060,29 @@ var require_util = __commonJS({ } return null; } - function isDestroyed(stream3) { - return !stream3 || !!(stream3.destroyed || stream3[kDestroyed]); + function isDestroyed(stream2) { + return !stream2 || !!(stream2.destroyed || stream2[kDestroyed]); } - function isReadableAborted(stream3) { - const state = stream3 && stream3._readableState; - return isDestroyed(stream3) && state && !state.endEmitted; + function isReadableAborted(stream2) { + const state = stream2 && stream2._readableState; + return isDestroyed(stream2) && state && !state.endEmitted; } - function destroy(stream3, err) { - if (stream3 == null || !isStream(stream3) || isDestroyed(stream3)) { + function destroy(stream2, err) { + if (stream2 == null || !isStream(stream2) || isDestroyed(stream2)) { return; } - if (typeof stream3.destroy === "function") { - if (Object.getPrototypeOf(stream3).constructor === IncomingMessage) { - stream3.socket = null; + if (typeof stream2.destroy === "function") { + if (Object.getPrototypeOf(stream2).constructor === IncomingMessage) { + stream2.socket = null; } - stream3.destroy(err); + stream2.destroy(err); } else if (err) { - process.nextTick((stream4, err2) => { - stream4.emit("error", err2); - }, stream3, err); + process.nextTick((stream3, err2) => { + stream3.emit("error", err2); + }, stream2, err); } - if (stream3.destroyed !== true) { - stream3[kDestroyed] = true; + if (stream2.destroyed !== true) { + stream2[kDestroyed] = true; } } var KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/; @@ -1171,15 +1171,15 @@ var require_util = __commonJS({ } } function isDisturbed(body) { - return !!(body && (stream2.isDisturbed ? stream2.isDisturbed(body) || body[kBodyUsed] : body[kBodyUsed] || body.readableDidRead || body._readableState && body._readableState.dataEmitted || isReadableAborted(body))); + return !!(body && (stream.isDisturbed ? 
stream.isDisturbed(body) || body[kBodyUsed] : body[kBodyUsed] || body.readableDidRead || body._readableState && body._readableState.dataEmitted || isReadableAborted(body))); } function isErrored(body) { - return !!(body && (stream2.isErrored ? stream2.isErrored(body) : /state: 'errored'/.test( + return !!(body && (stream.isErrored ? stream.isErrored(body) : /state: 'errored'/.test( nodeUtil.inspect(body) ))); } function isReadable(body) { - return !!(body && (stream2.isReadable ? stream2.isReadable(body) : /state: 'readable'/.test( + return !!(body && (stream.isReadable ? stream.isReadable(body) : /state: 'readable'/.test( nodeUtil.inspect(body) ))); } @@ -4131,11 +4131,11 @@ var require_util2 = __commonJS({ } } var ReadableStream2 = globalThis.ReadableStream; - function isReadableStreamLike(stream2) { + function isReadableStreamLike(stream) { if (!ReadableStream2) { ReadableStream2 = require("stream/web").ReadableStream; } - return stream2 instanceof ReadableStream2 || stream2[Symbol.toStringTag] === "ReadableStream" && typeof stream2.tee === "function"; + return stream instanceof ReadableStream2 || stream[Symbol.toStringTag] === "ReadableStream" && typeof stream.tee === "function"; } var MAXIMUM_ARGUMENT_LENGTH = 65535; function isomorphicDecode(input) { @@ -5292,13 +5292,13 @@ var require_body = __commonJS({ if (!ReadableStream2) { ReadableStream2 = require("stream/web").ReadableStream; } - let stream2 = null; + let stream = null; if (object instanceof ReadableStream2) { - stream2 = object; + stream = object; } else if (isBlobLike(object)) { - stream2 = object.stream(); + stream = object.stream(); } else { - stream2 = new ReadableStream2({ + stream = new ReadableStream2({ async pull(controller) { controller.enqueue( typeof source === "string" ? textEncoder.encode(source) : source @@ -5310,7 +5310,7 @@ var require_body = __commonJS({ type: void 0 }); } - assert(isReadableStreamLike(stream2)); + assert(isReadableStreamLike(stream)); let action = null; let source = null; let length = null; @@ -5388,14 +5388,14 @@ Content-Type: ${value.type || "application/octet-stream"}\r "Response body object should not be disturbed or locked" ); } - stream2 = object instanceof ReadableStream2 ? object : ReadableStreamFrom(object); + stream = object instanceof ReadableStream2 ? 
object : ReadableStreamFrom(object); } if (typeof source === "string" || util.isBuffer(source)) { length = Buffer.byteLength(source); } if (action != null) { let iterator; - stream2 = new ReadableStream2({ + stream = new ReadableStream2({ async start() { iterator = action(object)[Symbol.asyncIterator](); }, @@ -5406,7 +5406,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r controller.close(); }); } else { - if (!isErrored(stream2)) { + if (!isErrored(stream)) { controller.enqueue(new Uint8Array(value)); } } @@ -5418,7 +5418,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r type: void 0 }); } - const body = { stream: stream2, source, length }; + const body = { stream, source, length }; return [body, type]; } function safelyExtractBody(object, keepalive = false) { @@ -5447,15 +5447,15 @@ Content-Type: ${value.type || "application/octet-stream"}\r if (isUint8Array(body)) { yield body; } else { - const stream2 = body.stream; - if (util.isDisturbed(stream2)) { + const stream = body.stream; + if (util.isDisturbed(stream)) { throw new TypeError("The body has already been consumed."); } - if (stream2.locked) { + if (stream.locked) { throw new TypeError("The stream is locked."); } - stream2[kBodyUsed] = true; - yield* stream2; + stream[kBodyUsed] = true; + yield* stream; } } } @@ -6978,14 +6978,14 @@ var require_client = __commonJS({ channels.connectError = { hasSubscribers: false }; channels.connected = { hasSubscribers: false }; } - var Client = class extends DispatcherBase { + var Client2 = class extends DispatcherBase { /** * * @param {string|URL} url * @param {import('../types/client').Client.Options} options */ constructor(url, { - interceptors, + interceptors: interceptors2, maxHeaderSize, headersTimeout, socketTimeout, @@ -7089,7 +7089,7 @@ var require_client = __commonJS({ ...connect2 }); } - this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) ? interceptors.Client : [createRedirectInterceptor({ maxRedirections })]; + this[kInterceptors] = interceptors2 && interceptors2.Client && Array.isArray(interceptors2.Client) ? 
interceptors2.Client : [createRedirectInterceptor({ maxRedirections })]; this[kUrl] = util.parseOrigin(url); this[kConnector] = connect2; this[kSocket] = null; @@ -8128,23 +8128,23 @@ upgrade: ${upgrade}\r if (request.aborted) { return false; } - let stream2; + let stream; const h2State = client[kHTTP2SessionState]; headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]; headers[HTTP2_HEADER_METHOD] = method; if (method === "CONNECT") { session.ref(); - stream2 = session.request(headers, { endStream: false, signal }); - if (stream2.id && !stream2.pending) { - request.onUpgrade(null, null, stream2); + stream = session.request(headers, { endStream: false, signal }); + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream); ++h2State.openStreams; } else { - stream2.once("ready", () => { - request.onUpgrade(null, null, stream2); + stream.once("ready", () => { + request.onUpgrade(null, null, stream); ++h2State.openStreams; }); } - stream2.once("close", () => { + stream.once("close", () => { h2State.openStreams -= 1; if (h2State.openStreams === 0) session.unref(); }); @@ -8178,48 +8178,48 @@ upgrade: ${upgrade}\r const shouldEndStream = method === "GET" || method === "HEAD"; if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = "100-continue"; - stream2 = session.request(headers, { endStream: shouldEndStream, signal }); - stream2.once("continue", writeBodyH2); + stream = session.request(headers, { endStream: shouldEndStream, signal }); + stream.once("continue", writeBodyH2); } else { - stream2 = session.request(headers, { + stream = session.request(headers, { endStream: shouldEndStream, signal }); writeBodyH2(); } ++h2State.openStreams; - stream2.once("response", (headers2) => { + stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; - if (request.onHeaders(Number(statusCode), realHeaders, stream2.resume.bind(stream2), "") === false) { - stream2.pause(); + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), "") === false) { + stream.pause(); } }); - stream2.once("end", () => { + stream.once("end", () => { request.onComplete([]); }); - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (request.onData(chunk) === false) { - stream2.pause(); + stream.pause(); } }); - stream2.once("close", () => { + stream.once("close", () => { h2State.openStreams -= 1; if (h2State.openStreams === 0) { session.unref(); } }); - stream2.once("error", function(err) { + stream.once("error", function(err) { if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1; - util.destroy(stream2, err); + util.destroy(stream, err); } }); - stream2.once("frameError", (type, code) => { + stream.once("frameError", (type, code) => { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`); errorRequest2(client, request, err); if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1; - util.destroy(stream2, err); + util.destroy(stream, err); } }); return true; @@ -8228,10 +8228,10 @@ upgrade: ${upgrade}\r request.onRequestSent(); } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); - stream2.cork(); - stream2.write(body); - stream2.uncork(); - stream2.end(); + stream.cork(); + stream.write(body); + stream.uncork(); + stream.end(); request.onBodySent(body); request.onRequestSent(); } 
else if (util.isBlobLike(body)) { @@ -8240,7 +8240,7 @@ upgrade: ${upgrade}\r client, request, contentLength, - h2stream: stream2, + h2stream: stream, expectsPayload, body: body.stream(), socket: client[kSocket], @@ -8253,7 +8253,7 @@ upgrade: ${upgrade}\r request, contentLength, expectsPayload, - h2stream: stream2, + h2stream: stream, header: "", socket: client[kSocket] }); @@ -8266,7 +8266,7 @@ upgrade: ${upgrade}\r contentLength, expectsPayload, socket: client[kSocket], - h2stream: stream2, + h2stream: stream, header: "" }); } else if (util.isIterable(body)) { @@ -8277,7 +8277,7 @@ upgrade: ${upgrade}\r contentLength, expectsPayload, header: "", - h2stream: stream2, + h2stream: stream, socket: client[kSocket] }); } else { @@ -8569,7 +8569,7 @@ ${len.toString(16)}\r client.emit("error", err2); } } - module2.exports = Client; + module2.exports = Client2; } }); @@ -8828,7 +8828,7 @@ var require_pool = __commonJS({ kAddClient, kGetDispatcher } = require_pool_base(); - var Client = require_client(); + var Client2 = require_client(); var { InvalidArgumentError } = require_errors(); @@ -8839,7 +8839,7 @@ var require_pool = __commonJS({ var kConnections = Symbol("connections"); var kFactory = Symbol("factory"); function defaultFactory(origin, opts) { - return new Client(origin, opts); + return new Client2(origin, opts); } var Pool = class extends PoolBase { constructor(origin, { @@ -9092,7 +9092,7 @@ var require_agent = __commonJS({ var { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require_symbols(); var DispatcherBase = require_dispatcher_base(); var Pool = require_pool(); - var Client = require_client(); + var Client2 = require_client(); var util = require_util(); var createRedirectInterceptor = require_redirectInterceptor(); var { WeakRef: WeakRef2, FinalizationRegistry: FinalizationRegistry2 } = require_dispatcher_weakref()(); @@ -9105,7 +9105,7 @@ var require_agent = __commonJS({ var kFinalizer = Symbol("finalizer"); var kOptions = Symbol("options"); function defaultFactory(origin, opts) { - return opts && opts.connections === 1 ? new Client(origin, opts) : new Pool(origin, opts); + return opts && opts.connections === 1 ? 
new Client2(origin, opts) : new Pool(origin, opts); } var Agent = class extends DispatcherBase { constructor({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { @@ -9362,28 +9362,28 @@ var require_readable = __commonJS({ function isUnusable(self2) { return util.isDisturbed(self2) || isLocked(self2); } - async function consume(stream2, type) { - if (isUnusable(stream2)) { + async function consume(stream, type) { + if (isUnusable(stream)) { throw new TypeError("unusable"); } - assert(!stream2[kConsume]); + assert(!stream[kConsume]); return new Promise((resolve, reject) => { - stream2[kConsume] = { + stream[kConsume] = { type, - stream: stream2, + stream, resolve, reject, length: 0, body: [] }; - stream2.on("error", function(err) { + stream.on("error", function(err) { consumeFinish(this[kConsume], err); }).on("close", function() { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()); } }); - process.nextTick(consumeStart, stream2[kConsume]); + process.nextTick(consumeStart, stream[kConsume]); }); } function consumeStart(consume2) { @@ -9406,7 +9406,7 @@ var require_readable = __commonJS({ } } function consumeEnd(consume2) { - const { type, body, resolve, stream: stream2, length } = consume2; + const { type, body, resolve, stream, length } = consume2; try { if (type === "text") { resolve(toUSVString(Buffer.concat(body))); @@ -9424,11 +9424,11 @@ var require_readable = __commonJS({ if (!Blob2) { Blob2 = require("buffer").Blob; } - resolve(new Blob2(body, { type: stream2[kContentType] })); + resolve(new Blob2(body, { type: stream[kContentType] })); } consumeFinish(consume2); } catch (err) { - stream2.destroy(err); + stream.destroy(err); } } function consumePush(consume2, chunk) { @@ -9852,10 +9852,10 @@ var require_api_stream = __commonJS({ } } }; - function stream2(opts, factory, callback) { + function stream(opts, factory, callback) { if (callback === void 0) { return new Promise((resolve, reject) => { - stream2.call(this, opts, factory, (err, data2) => { + stream.call(this, opts, factory, (err, data2) => { return err ? 
reject(err) : resolve(data2); }); }); @@ -9870,7 +9870,7 @@ var require_api_stream = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = stream2; + module2.exports = stream; } }); @@ -10755,7 +10755,7 @@ var require_mock_client = __commonJS({ "../../node_modules/undici/lib/mock/mock-client.js"(exports2, module2) { "use strict"; var { promisify } = require("util"); - var Client = require_client(); + var Client2 = require_client(); var { buildMockDispatch } = require_mock_utils(); var { kDispatches, @@ -10769,7 +10769,7 @@ var require_mock_client = __commonJS({ var { MockInterceptor } = require_mock_interceptor(); var Symbols = require_symbols(); var { InvalidArgumentError } = require_errors(); - var MockClient = class extends Client { + var MockClient = class extends Client2 { constructor(origin, opts) { super(origin, opts); if (!opts || !opts.agent || typeof opts.agent.dispatch !== "function") { @@ -13772,7 +13772,7 @@ var require_fetch = __commonJS({ if (!ReadableStream2) { ReadableStream2 = require("stream/web").ReadableStream; } - const stream2 = new ReadableStream2( + const stream = new ReadableStream2( { async start(controller) { fetchParams.controller.controller = controller; @@ -13791,7 +13791,7 @@ var require_fetch = __commonJS({ } } ); - response.body = { stream: stream2 }; + response.body = { stream }; fetchParams.controller.on("terminated", onAborted); fetchParams.controller.resume = async () => { while (true) { @@ -13822,7 +13822,7 @@ var require_fetch = __commonJS({ return; } fetchParams.controller.controller.enqueue(new Uint8Array(bytes)); - if (isErrored(stream2)) { + if (isErrored(stream)) { fetchParams.controller.terminate(); return; } @@ -13834,13 +13834,13 @@ var require_fetch = __commonJS({ function onAborted(reason) { if (isAborted(fetchParams)) { response.aborted = true; - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ); } } else { - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError("terminated", { cause: isErrorLike(reason) ? 
reason : void 0 })); @@ -14391,8 +14391,8 @@ var require_util4 = __commonJS({ fr[kState] = "loading"; fr[kResult] = null; fr[kError] = null; - const stream2 = blob.stream(); - const reader = stream2.getReader(); + const stream = blob.stream(); + const reader = stream.getReader(); const bytes = []; let chunkPromise = reader.read(); let isFirstChunk = true; @@ -15076,8 +15076,8 @@ var require_cache = __commonJS({ const clonedResponse = cloneResponse(innerResponse); const bodyReadPromise = createDeferredPromise(); if (innerResponse.body != null) { - const stream2 = innerResponse.body.stream; - const reader = stream2.getReader(); + const stream = innerResponse.body.stream; + const reader = stream.getReader(); readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject); } else { bodyReadPromise.resolve(void 0); @@ -17157,7 +17157,7 @@ var require_websocket = __commonJS({ var require_undici = __commonJS({ "../../node_modules/undici/index.js"(exports2, module2) { "use strict"; - var Client = require_client(); + var Client2 = require_client(); var Dispatcher = require_dispatcher(); var errors2 = require_errors(); var Pool = require_pool(); @@ -17186,7 +17186,7 @@ var require_undici = __commonJS({ } Object.assign(Dispatcher.prototype, api); module2.exports.Dispatcher = Dispatcher; - module2.exports.Client = Client; + module2.exports.Client = Client2; module2.exports.Pool = Pool; module2.exports.BalancedPool = BalancedPool; module2.exports.Agent = Agent; @@ -17535,9 +17535,9 @@ var require_lib = __commonJS({ return this.request("HEAD", requestUrl, null, additionalHeaders || {}); }); } - sendStream(verb, requestUrl, stream2, additionalHeaders) { + sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter2(this, void 0, void 0, function* () { - return this.request(verb, requestUrl, stream2, additionalHeaders); + return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** @@ -24318,20 +24318,20 @@ var require_nodeHttpClient = __commonJS({ function isReadableStream(body) { return body && typeof body.pipe === "function"; } - function isStreamComplete(stream2) { - if (stream2.readable === false) { + function isStreamComplete(stream) { + if (stream.readable === false) { return Promise.resolve(); } return new Promise((resolve) => { const handler = () => { resolve(); - stream2.removeListener("close", handler); - stream2.removeListener("end", handler); - stream2.removeListener("error", handler); + stream.removeListener("close", handler); + stream.removeListener("end", handler); + stream.removeListener("error", handler); }; - stream2.on("close", handler); - stream2.on("end", handler); - stream2.on("error", handler); + stream.on("close", handler); + stream.on("end", handler); + stream.on("error", handler); }); } function isArrayBuffer(body) { @@ -24545,33 +24545,33 @@ var require_nodeHttpClient = __commonJS({ } return headers; } - function getDecodedResponseStream(stream2, headers) { + function getDecodedResponseStream(stream, headers) { const contentEncoding = headers.get("Content-Encoding"); if (contentEncoding === "gzip") { const unzip = zlib.createGunzip(); - stream2.pipe(unzip); + stream.pipe(unzip); return unzip; } else if (contentEncoding === "deflate") { const inflate = zlib.createInflate(); - stream2.pipe(inflate); + stream.pipe(inflate); return inflate; } - return stream2; + return stream; } - function streamToText(stream2) { + function streamToText(stream) { return new Promise((resolve, reject) => { const buffer = []; - stream2.on("data", (chunk) => { + 
stream.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) { buffer.push(chunk); } else { buffer.push(Buffer.from(chunk)); } }); - stream2.on("end", () => { + stream.on("end", () => { resolve(Buffer.concat(buffer).toString("utf8")); }); - stream2.on("error", (e) => { + stream.on("error", (e) => { if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { reject(e); } else { @@ -25902,18 +25902,18 @@ var require_helpers2 = __commonJS({ exports2.req = exports2.json = exports2.toBuffer = void 0; var http = __importStar2(require("http")); var https = __importStar2(require("https")); - async function toBuffer(stream2) { + async function toBuffer(stream) { let length = 0; const chunks = []; - for await (const chunk of stream2) { + for await (const chunk of stream) { length += chunk.length; chunks.push(chunk); } return Buffer.concat(chunks, length); } exports2.toBuffer = toBuffer; - async function json(stream2) { - const buf = await toBuffer(stream2); + async function json(stream) { + const buf = await toBuffer(stream); const str = buf.toString("utf8"); try { return JSON.parse(str); @@ -26726,12 +26726,12 @@ var require_concat = __commonJS({ webStream.values = streamAsyncIterator.bind(webStream); } } - function ensureNodeStream(stream2) { - if (stream2 instanceof ReadableStream) { - makeAsyncIterable(stream2); - return stream_1.Readable.fromWeb(stream2); + function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return stream_1.Readable.fromWeb(stream); } else { - return stream2; + return stream; } } function toStream(source) { @@ -26749,9 +26749,9 @@ var require_concat = __commonJS({ return stream_1.Readable.from(function() { return tslib_1.__asyncGenerator(this, arguments, function* () { var _a4, e_1, _b, _c; - for (const stream2 of streams) { + for (const stream of streams) { try { - for (var _d = true, stream_2 = (e_1 = void 0, tslib_1.__asyncValues(stream2)), stream_2_1; stream_2_1 = yield tslib_1.__await(stream_2.next()), _a4 = stream_2_1.done, !_a4; _d = true) { + for (var _d = true, stream_2 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_2_1; stream_2_1 = yield tslib_1.__await(stream_2.next()), _a4 = stream_2_1.done, !_a4; _d = true) { _c = stream_2_1.value; _d = false; const chunk = _c; @@ -27307,9 +27307,9 @@ var require_sendRequest = __commonJS({ try { const response = await pipeline.sendRequest(httpClient, request); const headers = response.headers.toJSON(); - const stream2 = (_a4 = response.readableStreamBody) !== null && _a4 !== void 0 ? _a4 : response.browserStreamBody; - const parsedBody = options.responseAsStream || stream2 !== void 0 ? void 0 : getResponseBody(response); - const body = stream2 !== null && stream2 !== void 0 ? stream2 : parsedBody; + const stream = (_a4 = response.readableStreamBody) !== null && _a4 !== void 0 ? _a4 : response.browserStreamBody; + const parsedBody = options.responseAsStream || stream !== void 0 ? void 0 : getResponseBody(response); + const body = stream !== null && stream !== void 0 ? stream : parsedBody; if (options === null || options === void 0 ? 
void 0 : options.onResponse) { options.onResponse(Object.assign(Object.assign({}, response), { request, rawHeaders: headers, parsedBody })); } @@ -28524,15 +28524,15 @@ var require_file2 = __commonJS({ return blob; } } - function createFileFromStream(stream2, name, options = {}) { + function createFileFromStream(stream, name, options = {}) { var _a4, _b, _c, _d; return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a4 = options.type) !== null && _a4 !== void 0 ? _a4 : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : (/* @__PURE__ */ new Date()).getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { - const s = stream2(); + const s = stream(); if (isNodeReadableStream(s)) { throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); } return s; - }, [rawContent]: stream2 }); + }, [rawContent]: stream }); } function createFile(content, name, options = {}) { var _a4, _b, _c; @@ -33280,1421 +33280,2312 @@ var require_commonjs10 = __commonJS({ } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/logger.js -var require_logger2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/log.js +var require_log5 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.logger = void 0; var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("core-lro"); + exports2.logger = (0, logger_1.createClientLogger)("storage-blob"); } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; + /** + * The main method to implement that manipulates a request/response. + */ + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel, message) { + this._options.log(logLevel, message); + } + }; + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js var require_constants9 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; - exports2.POLL_INTERVAL_IN_MS = 2e3; - exports2.terminalStates = ["succeeded", "canceled", "failed"]; + exports2.PathStylePorts = exports2.BlobDoesNotUseCustomerSpecifiedEncryption = exports2.BlobUsesCustomerSpecifiedEncryptionMsg = exports2.StorageBlobLoggingAllowedQueryParameters = exports2.StorageBlobLoggingAllowedHeaderNames = exports2.DevelopmentConnectionString = exports2.EncryptionAlgorithmAES25 = exports2.HTTP_VERSION_1_1 = exports2.HTTP_LINE_ENDING = exports2.BATCH_MAX_PAYLOAD_IN_BYTES = exports2.BATCH_MAX_REQUEST = exports2.SIZE_1_MB = exports2.ETagAny = exports2.ETagNone = exports2.HeaderConstants = exports2.HTTPURLConnection = exports2.URLConstants = exports2.StorageOAuthScopes = exports2.REQUEST_TIMEOUT = exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = exports2.BLOCK_BLOB_MAX_BLOCKS = exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = exports2.SERVICE_VERSION = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "12.28.0"; + exports2.SERVICE_VERSION = "2025-07-05"; + exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_BLOCKS = 5e4; + exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; + exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; + exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; + exports2.REQUEST_TIMEOUT = 100 * 1e3; + exports2.StorageOAuthScopes = "https://storage.azure.com/.default"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" + } + }; + exports2.HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416 + }; + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.ETagNone = ""; + exports2.ETagAny = "*"; + exports2.SIZE_1_MB = 1 * 1024 * 1024; + exports2.BATCH_MAX_REQUEST = 256; + exports2.BATCH_MAX_PAYLOAD_IN_BYTES = 4 * 
exports2.SIZE_1_MB; + exports2.HTTP_LINE_ENDING = "\r\n"; + exports2.HTTP_VERSION_1_1 = "HTTP/1.1"; + exports2.EncryptionAlgorithmAES25 = "AES256"; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags" + ]; + exports2.StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot" + ]; + exports2.BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; + exports2.BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + exports2.PathStylePorts = [ + "10000", + 
"10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js -var require_operation = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js +var require_utils_common = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; - var logger_js_1 = require_logger2(); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay; + exports2.padStart = padStart; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.toBlobTagsString = toBlobTagsString; + exports2.toBlobTags = toBlobTags; + exports2.toTags = toTags; + exports2.toQuerySerialization = toQuerySerialization; + exports2.parseObjectReplicationRecord = parseObjectReplicationRecord; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = httpAuthorizationToString; + exports2.BlobNameToString = BlobNameToString; + exports2.ConvertInternalResponseOfListBlobFlat = ConvertInternalResponseOfListBlobFlat; + exports2.ConvertInternalResponseOfListBlobHierarchy = ConvertInternalResponseOfListBlobHierarchy; + exports2.ExtractPageRangeInfoItems = ExtractPageRangeInfoItems; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); var constants_js_1 = require_constants9(); - function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } catch (e) { - throw new Error(`Unable to deserialize input state: ${serializedState}`); - } + function escapeURLPath(url) { + const urlParsed = new URL(url); + let path6 = urlParsed.pathname; + path6 = path6 || "/"; + path6 = escape2(path6); + urlParsed.pathname = path6; + return urlParsed.toString(); } - exports2.deserializeState = deserializeState; - function setStateError(inputs) { - const { state, stateProxy, isOperationError } = inputs; - return (error2) => { - if (isOperationError(error2)) { - stateProxy.setError(state, error2); - stateProxy.setFailed(state); + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + 
const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } } - throw error2; - }; - } - function appendReadableErrorMessage(currentMessage, innerMessage) { - let message = currentMessage; - if (message.slice(-1) !== ".") { - message = message + "."; } - return message + " " + innerMessage; + return proxyUri; } - function simplifyError(err) { - let message = err.message; - let code = err.code; - let curErr = err; - while (curErr.innererror) { - curErr = curErr.innererror; - code = curErr.code; - message = appendReadableErrorMessage(message, curErr.message); + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } } - return { - code, - message - }; + return ""; } - function processOperationStatus(result2) { - const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result2; - switch (status) { - case "succeeded": { - stateProxy.setSucceeded(state); - break; - } - case "failed": { - const err = getError === null || getError === void 0 ? void 0 : getError(response); - let postfix = ""; - if (err) { - const { code, message } = simplifyError(err); - postfix = `. ${code}. ${message}`; + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = constants_js_1.DevelopmentConnectionString; + } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); } - const errStr = `The long-running operation has failed${postfix}`; - stateProxy.setError(state, new Error(errStr)); - stateProxy.setFailed(state); - logger_js_1.logger.warning(errStr); - break; + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - case "canceled": { - stateProxy.setCanceled(state); - break; + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); } - } - if ((isDone === null || isDone === void 0 ? 
void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult - })); + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri + }; + } else { + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } - function buildResult(inputs) { - const { processResult, response, state } = inputs; - return processResult ? processResult(response, state) : response; + function escape2(text3) { + return encodeURIComponent(text3).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); } - async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; - const { operationLocation, resourceLocation, metadata, response } = await init(); - if (operationLocation) - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); - const config = { - metadata, - operationLocation, - resourceLocation - }; - logger_js_1.logger.verbose(`LRO: Operation description:`, config); - const state = stateProxy.initState(config); - const status = getOperationStatus({ response, state, operationLocation }); - processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); - return state; + function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path6 = urlParsed.pathname; + path6 = path6 ? path6.endsWith("/") ? `${path6}${name}` : `${path6}/${name}` : name; + urlParsed.pathname = path6; + return urlParsed.toString(); } - exports2.initOperation = initOperation; - async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; - const response = await poll(operationLocation, options).catch(setStateError({ - state, - stateProxy, - isOperationError - })); - const status = getOperationStatus(response, state); - logger_js_1.logger.verbose(`LRO: Status: - Polling from: ${state.config.operationLocation} - Operation status: ${status} - Polling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); - if (status === "succeeded") { - const resourceLocation = getResourceLocation(response, state); - if (resourceLocation !== void 0) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), - status - }; + function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : void 0; + const searchString = urlParsed.search === "" ? "?" 
: urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } } } - return { response, status }; - } - async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; - const { operationLocation } = state.config; - if (operationLocation !== void 0) { - const { response, status } = await pollOperationHelper({ - poll, - getOperationStatus, - state, - stateProxy, - operationLocation, - getResourceLocation, - isOperationError, - options - }); - processOperationStatus({ - status, - response, - state, - stateProxy, - isDone, - processResult, - getError, - setErrorAsResult - }); - if (!constants_js_1.terminalStates.includes(status)) { - const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); - if (intervalInMs) - setDelay(intervalInMs); - const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); - if (location !== void 0) { - const isUpdated = operationLocation !== location; - state.config.operationLocation = location; - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); - } else - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); - } - updateState === null || updateState === void 0 ? void 0 : updateState(state, response); + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); } - exports2.pollOperation = pollOperation; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js -var require_operation2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; - var operation_js_1 = require_operation(); - var logger_js_1 = require_logger2(); - function getOperationLocationPollingUrl(inputs) { - const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; - } - function getLocationHeader(rawResponse) { - return rawResponse.headers["location"]; + function getURLParameter(url, name) { + const urlParsed = new URL(url); + return urlParsed.searchParams.get(name) ?? 
void 0; } - function getOperationLocationHeader(rawResponse) { - return rawResponse.headers["operation-location"]; + function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); } - function getAzureAsyncOperationHeader(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; + function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } catch (e) { + return void 0; + } } - function findResourceLocation(inputs) { - var _a4; - const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; - switch (requestMethod) { - case "PUT": { - return requestPath; - } - case "DELETE": { - return void 0; - } - case "PATCH": { - return (_a4 = getDefault()) !== null && _a4 !== void 0 ? _a4 : requestPath; - } - default: { - return getDefault(); - } + function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } catch (e) { + return void 0; } - function getDefault() { - switch (resourceLocationConfig) { - case "azure-async-operation": { - return void 0; - } - case "original-uri": { - return requestPath; - } - case "location": - default: { - return location; - } - } + } + function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; } + return `${pathString}${queryString}`; } - function inferLroMode(inputs) { - const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; - const operationLocation = getOperationLocationHeader(rawResponse); - const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); - const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); - const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); - if (pollingUrl !== void 0) { - return { - mode: "OperationLocation", - operationLocation: pollingUrl, - resourceLocation: findResourceLocation({ - requestMethod: normalizedRequestMethod, - location, - requestPath, - resourceLocationConfig - }) - }; - } else if (location !== void 0) { - return { - mode: "ResourceLocation", - operationLocation: location + function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? 
queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; + } + function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); + } + function truncatedISO8061Date(date, withMilliseconds = true) { + const dateString = date.toISOString(); + return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; + } + function base64encode(content) { + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); + } + function generateBlockID(blockIDPrefix, blockIndex) { + const maxSourceStringLength = 48; + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); + } + async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + let timeout; + const abortHandler = () => { + if (timeout !== void 0) { + clearTimeout(timeout); + } + reject(abortError); }; - } else if (normalizedRequestMethod === "PUT" && requestPath) { - return { - mode: "Body", - operationLocation: requestPath + const resolveHandler = () => { + if (aborter !== void 0) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== void 0) { + aborter.addEventListener("abort", abortHandler); + } + }); + } + function padStart(currentString, targetLength, padString = " ") { + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; } else { - return void 0; + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; } } - exports2.inferLroMode = inferLroMode; - function transformStatus(inputs) { - const { status, statusCode } = inputs; - if (typeof status !== "string" && status !== void 0) { - throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. 
Check your Azure subscription or resource status for more information.`); + function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); } - switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { - case void 0: - return toOperationStatus(statusCode); - case "succeeded": - return "succeeded"; - case "failed": - return "failed"; - case "running": - case "accepted": - case "started": - case "canceling": - case "cancelling": - return "running"; - case "canceled": - case "cancelled": - return "canceled"; - default: { - logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); - return status; + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); } } + return headers; } - function getStatus(rawResponse) { - var _a4; - const { status } = (_a4 = rawResponse.body) !== null && _a4 !== void 0 ? _a4 : {}; - return transformStatus({ status, statusCode: rawResponse.statusCode }); + function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } - function getProvisioningState(rawResponse) { - var _a4, _b; - const { properties, provisioningState } = (_a4 = rawResponse.body) !== null && _a4 !== void 0 ? _a4 : {}; - const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return transformStatus({ status, statusCode: rawResponse.statusCode }); + function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + accountName = parsedUrl.hostname.split(".")[0]; + } else if (isIpEndpointStyle(parsedUrl)) { + accountName = parsedUrl.pathname.split("/")[1]; + } else { + accountName = ""; + } + return accountName; + } catch (error2) { + throw new Error("Unable to extract accountName with provided information."); + } } - function toOperationStatus(statusCode) { - if (statusCode === 202) { - return "running"; - } else if (statusCode < 300) { - return "succeeded"; - } else { - return "failed"; + function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); + } + function toBlobTagsString(tags) { + if (tags === void 0) { + return void 0; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } } + return tagPairs.join("&"); } - function parseRetryAfter({ rawResponse }) { - const retryAfter = rawResponse.headers["retry-after"]; - if (retryAfter !== void 0) { - const retryAfterInSeconds = parseInt(retryAfter); - return isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; + function toBlobTags(tags) { + if (tags === void 0) { + return void 0; } - return void 0; + const res = { + blobTagSet: [] + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value + }); + } + } + return res; } - exports2.parseRetryAfter = parseRetryAfter; - function getErrorFromResponse(response) { - const error2 = accessBodyProperty(response, "error"); - if (!error2) { - logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); - return; + function toTags(tags) { + if (tags === void 0) { + return void 0; } - if (!error2.code || !error2.message) { - logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); - return; + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; } - return error2; + return res; } - exports2.getErrorFromResponse = getErrorFromResponse; - function calculatePollingIntervalFromDate(retryAfterDate) { - const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); - const retryAfterTime = retryAfterDate.getTime(); - if (timeNow < retryAfterTime) { - return retryAfterTime - timeNow; + function toQuerySerialization(textConfiguration) { + if (textConfiguration === void 0) { + return void 0; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false + } + } + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator + } + } + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema + } + } + }; + case "parquet": + return { + format: { + type: "parquet" + } + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); } - return void 0; } - function getStatusFromInitialResponse(inputs) { - const { response, state, operationLocation } = inputs; - function helper() { - var _a4; - const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? 
void 0 : _a4["mode"]; - switch (mode) { - case void 0: - return toOperationStatus(response.rawResponse.statusCode); - case "Body": - return getOperationStatus(response, state); - default: - return "running"; + function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return void 0; + } + if ("policy-id" in objectReplicationRecord) { + return void 0; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key] + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } else { + orProperties.push({ + policyId: ids[0], + rules: [rule] + }); } } - const status = helper(); - return status === "running" && operationLocation === void 0 ? "succeeded" : status; + return orProperties; } - exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; - async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; - return (0, operation_js_1.initOperation)({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = inferLroMode({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - stateProxy, - processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: getStatusFromInitialResponse, - setErrorAsResult - }); + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; } - exports2.initHttpOperation = initHttpOperation; - function getOperationLocation({ rawResponse }, state) { - var _a4; - const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? void 0 : _a4["mode"]; - switch (mode) { - case "OperationLocation": { - return getOperationLocationPollingUrl({ - operationLocation: getOperationLocationHeader(rawResponse), - azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) - }); - } - case "ResourceLocation": { - return getLocationHeader(rawResponse); - } - case "Body": - default: { - return void 0; - } + function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; + } + function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } else { + return name.content; } } - exports2.getOperationLocation = getOperationLocation; - function getOperationStatus({ rawResponse }, state) { - var _a4; - const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? 
void 0 : _a4["mode"]; - switch (mode) { - case "OperationLocation": { - return getStatus(rawResponse); + function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return { + ...internalResponse, + segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } - case "ResourceLocation": { - return toOperationStatus(rawResponse.statusCode); + }; + } + function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + return { + ...internalResponse, + segment: { + blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: BlobNameToString(blobPrefixInternal.name) + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } - case "Body": { - return getProvisioningState(rawResponse); + }; + } + function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + ++pageRangeIndex; + } else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; + ++clearRangeIndex; } - default: - throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } - } - exports2.getOperationStatus = getOperationStatus; - function accessBodyProperty({ flatResponse, rawResponse }, prop2) { - var _a4, _b; - return (_a4 = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop2]) !== null && _a4 !== void 0 ? _a4 : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop2]; - } - function getResourceLocation(res, state) { - const loc = accessBodyProperty(res, "resourceLocation"); - if (loc && typeof loc === "string") { - state.config.resourceLocation = loc; + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; } - return state.config.resourceLocation; } - exports2.getResourceLocation = getResourceLocation; - function isOperationError(e) { - return e.name === "RestError"; + function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); } - exports2.isOperationError = isOperationError; - async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; - return (0, operation_js_1.pollOperation)({ - state, - stateProxy, - setDelay, - processResult: processResult ? 
({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, - getError: getErrorFromResponse, - updateState, - getPollingInterval: parseRetryAfter, - getOperationLocation, - getOperationStatus, - isOperationError, - getResourceLocation, - options, - /** - * The expansion here is intentional because `lro` could be an object that - * references an inner this, so we need to preserve a reference to it. - */ - poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), - setErrorAsResult - }); + function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); } - exports2.pollHttpOperation = pollHttpOperation; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js -var require_poller = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildCreatePoller = void 0; - var operation_js_1 = require_operation(); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs3(); + var RequestPolicy_js_1 = require_RequestPolicy(); var constants_js_1 = require_constants9(); - var core_util_1 = require_commonjs4(); - var createStateProxy = () => ({ + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * The state at this point is created to be of type OperationState. - * It will be updated later to be of type TState when the - * customer-provided callback, `updateState`, is called during polling. + * RetryOptions. 
*/ - initState: (config) => ({ status: "running", config }), - setCanceled: (state) => state.status = "canceled", - setError: (state, error2) => state.error = error2, - setResult: (state, result2) => state.result = result2, - setRunning: (state) => state.status = "running", - setSucceeded: (state) => state.status = "succeeded", - setFailed: (state) => state.status = "failed", - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => state.status === "canceled", - isFailed: (state) => state.status === "failed", - isRunning: (state) => state.status === "running", - isSucceeded: (state) => state.status === "succeeded" - }); - function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; - return async ({ init, poll }, options) => { - const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; - const stateProxy = createStateProxy(); - const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { - let called = false; - return (operationLocation, isUpdated) => { - if (isUpdated) - withOperationLocationCallback(operationLocation); - else if (!called) - withOperationLocationCallback(operationLocation); - called = true; - }; - })() : void 0; - const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ - init, - stateProxy, - processResult, - getOperationStatus: getStatusFromInitialResponse, - withOperationLocation, - setErrorAsResult: !resolveOnUnsuccessful - }); - let resultPromise; - const abortController = new AbortController(); - const handlers = /* @__PURE__ */ new Map(); - const handleProgressEvents = async () => handlers.forEach((h) => h(state)); - const cancelErrMsg = "Operation was canceled"; - let currentPollIntervalInMs = intervalInMs; - const poller = { - getOperationState: () => state, - getResult: () => state.result, - isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), - isStopped: () => resultPromise === void 0, - stopPolling: () => { - abortController.abort(); - }, - toString: () => JSON.stringify({ - state - }), - onProgress: (callback) => { - const s = Symbol(); - handlers.set(s, callback); - return () => handlers.delete(s); - }, - pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { - const { abortSignal: inputAbortSignal } = pollOptions || {}; - function abortListener() { - abortController.abort(); - } - const abortSignal = abortController.signal; - if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { - abortController.abort(); - } else if (!abortSignal.aborted) { - inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); - } - try { - if (!poller.isDone()) { - await poller.poll({ abortSignal }); - while (!poller.isDone()) { - await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); - await poller.poll({ abortSignal }); - } - } - } finally { - inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); - } - if (resolveOnUnsuccessful) { - return poller.getResult(); - } else { - switch (state.status) { - case "succeeded": - return poller.getResult(); - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - case "notStarted": - case "running": - throw new Error(`Polling completed without succeeding or failing`); - } - } - })().finally(() => { - resultPromise = void 0; - }), - async poll(pollOptions) { - if (resolveOnUnsuccessful) { - if (poller.isDone()) - return; - } else { - switch (state.status) { - case "succeeded": - return; - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - await (0, operation_js_1.pollOperation)({ - poll, - state, - stateProxy, - getOperationLocation, - isOperationError, - withOperationLocation, - getPollingInterval, - getOperationStatus: getStatusFromPollResponse, - getResourceLocation, - processResult, - getError, - updateState, - options: pollOptions, - setDelay: (pollIntervalInMs) => { - currentPollIntervalInMs = pollIntervalInMs; - }, - setErrorAsResult: !resolveOnUnsuccessful - }); - await handleProgressEvents(); - if (!resolveOnUnsuccessful) { - switch (state.status) { - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - } + retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? 
retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost }; - return poller; - }; - } - exports2.buildCreatePoller = buildCreatePoller; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js -var require_poller2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var operation_js_1 = require_operation2(); - var poller_js_1 = require_poller(); - async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; - return (0, poller_js_1.buildCreatePoller)({ - getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, - getStatusFromPollResponse: operation_js_1.getOperationStatus, - isOperationError: operation_js_1.isOperationError, - getOperationLocation: operation_js_1.getOperationLocation, - getResourceLocation: operation_js_1.getResourceLocation, - getPollingInterval: operation_js_1.parseRetryAfter, - getError: operation_js_1.getErrorFromResponse, - resolveOnUnsuccessful - })({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = (0, operation_js_1.inferLroMode)({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - poll: lro.sendPollRequest - }, { - intervalInMs, - withOperationLocation, - restoreFrom, - updateState, - processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse - }); - } - exports2.createHttpPoller = createHttpPoller; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js -var require_operation3 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.GenericPollOperation = void 0; - var operation_js_1 = require_operation2(); - var logger_js_1 = require_logger2(); - var createStateProxy = () => ({ - initState: (config) => ({ config, isStarted: true }), - setCanceled: (state) => state.isCancelled = true, - setError: (state, error2) => state.error = error2, - setResult: (state, result2) => state.result = result2, - setRunning: (state) => state.isStarted = true, - setSucceeded: (state) => state.isCompleted = true, - setFailed: () => { - }, - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => !!state.isCancelled, - isFailed: (state) => !!state.error, - isRunning: (state) => !!state.isStarted, - isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) - }); - var GenericPollOperation = class { - constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.setErrorAsResult = setErrorAsResult; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); } - async update(options) { - var _a4; - const stateProxy = createStateProxy(); - if (!this.state.isStarted) { - this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult - })); + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); } - const updateState = this.updateState; - const isDone = this.isDone; - if (!this.state.isCompleted && this.state.error === void 0) { - await (0, operation_js_1.pollHttpOperation)({ - lro: this.lro, - state: this.state, - stateProxy, - processResult: this.processResult, - updateState: updateState ? 
(state, { rawResponse }) => updateState(state, rawResponse) : void 0, - isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, - options, - setDelay: (intervalInMs) => { - this.pollerConfig.intervalInMs = intervalInMs; - }, - setErrorAsResult: this.setErrorAsResult - }); + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); } - (_a4 = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a4 === void 0 ? void 0 : _a4.call(options, this.state); - return this; + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); } - async cancel() { - logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); - return this; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; + } + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; } /** - * Serializes the Poller operation. + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - */ - toString() { - return JSON.stringify({ - state: this.state - }); + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - exports2.GenericPollOperation = GenericPollOperation; + exports2.StorageRetryPolicy = StorageRetryPolicy; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js -var require_poller3 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; - var PollerStoppedError = class _PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, _PollerStoppedError.prototype); - } - }; - exports2.PollerStoppedError = PollerStoppedError; - var PollerCancelledError = class _PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, _PollerCancelledError.prototype); - } - }; - exports2.PollerCancelledError = PollerCancelledError; - var Poller = class { + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy(); + Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; /** - * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. - * - * When writing an implementation of a Poller, this implementation needs to deal with the initialization - * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's - * operation has already been defined, at least its basic properties. The code below shows how to approach - * the definition of the constructor of a new custom poller. 
- * - * ```ts - * export class MyPoller extends Poller { - * constructor({ - * // Anything you might need outside of the basics - * }) { - * let state: MyOperationState = { - * privateProperty: private, - * publicProperty: public, - * }; - * - * const operation = { - * state, - * update, - * cancel, - * toString - * } - * - * // Sending the operation to the parent's constructor. - * super(operation); - * - * // You can assign more local properties here. - * } - * } - * ``` - * - * Inside of this constructor, a new promise is created. This will be used to - * tell the user when the poller finishes (see `pollUntilDone()`). The promise's - * resolve and reject methods are also used internally to control when to resolve - * or reject anyone waiting for the poller to finish. - * - * The constructor of a custom implementation of a poller is where any serialized version of - * a previous poller's operation should be deserialized into the operation sent to the - * base constructor. For example: - * - * ```ts - * export class MyPoller extends Poller { - * constructor( - * baseOperation: string | undefined - * ) { - * let state: MyOperationState = {}; - * if (baseOperation) { - * state = { - * ...JSON.parse(baseOperation).state, - * ...state - * }; - * } - * const operation = { - * state, - * // ... - * } - * super(operation); - * } - * } - * ``` - * - * @param operation - Must contain the basic properties of `PollOperation`. + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - */ - constructor(operation) { - this.resolveOnUnsuccessful = false; - this.stopped = true; - this.pollProgressCallbacks = []; - this.operation = operation; - this.promise = new Promise((resolve, reject) => { - this.resolve = resolve; - this.reject = reject; - }); - this.promise.catch(() => { - }); + constructor(retryOptions) { + this.retryOptions = retryOptions; } /** - * Starts a loop that will break only if the poller is done - * or if the poller is stopped. + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - */ - async startPolling(pollOptions = {}) { - if (this.stopped) { - this.stopped = false; - } - while (!this.isStopped() && !this.isDone()) { - await this.poll(pollOptions); - await this.delay(); - } + create(nextPolicy, options) { + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); } + }; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * pollOnce does one polling, by calling to the update method of the underlying - * poll operation to make any relevant change effective. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Sends out request. * - * @param options - Optional properties passed to the operation's update method. 
+ * @param request - */ - async pollOnce(options = {}) { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); - } - this.processUpdatedState(); + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } /** - * fireProgress calls the functions passed in via onProgress the method of the poller. + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. * - * It loops over all of the callbacks received from onProgress, and executes them, sending them - * the current operation state. - * - * @param state - The current operation state. + * @param request - */ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { - callback(state); + signRequest(request) { + return request; + } + }; + exports2.CredentialPolicy = CredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.compareHeader = compareHeader; + var table_lv0 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1820, + 0, + 1823, + 1825, + 1827, + 1829, + 0, + 0, + 0, + 1837, + 2051, + 0, + 0, + 1843, + 0, + 3331, + 3354, + 3356, + 3358, + 3360, + 3362, + 3364, + 3366, + 3368, + 3370, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 0, + 0, + 1859, + 1860, + 1864, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 1868, + 0, + 1872, + 0 + ]); + var table_lv2 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + var table_lv4 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 32786, + 0, + 0, + 0, + 0, + 0, + 33298, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, 
+ 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; + } + function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; + } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 1; + if (weight1 === 1 && weight2 === 1) { + i = 0; + j = 0; + ++curr_level; + } else if (weight1 === weight2) { + ++i; + ++j; + } else if (weight1 === 0) { + ++i; + } else if (weight2 === 0) { + ++j; + } else { + return weight1 < weight2; } } + return false; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { /** - * Invokes the underlying operation's cancel method. + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); + factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } /** - * Returns a promise that will resolve once a single polling request finishes. - * It does this by calling the update method of the Poller's operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Signs request. * - * @param options - Optional properties passed to the operation's update method. 
+ * @param request - */ - poll(options = {}) { - if (!this.pollOncePromise) { - this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = void 0; - }; - this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + signRequest(request) { + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } - return this.pollOncePromise; + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + return request; } - processUpdatedState() { - if (this.operation.state.error) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - this.reject(this.operation.state.error); - throw this.operation.state.error; - } - } - if (this.operation.state.isCancelled) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - const error2 = new PollerCancelledError("Operation was canceled"); - this.reject(error2); - throw error2; - } + /** + * Retrieve header value according to shared key sign rules. + * @see https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; } - if (this.isDone() && this.resolve) { - this.resolve(this.getResult()); + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } /** - * Returns a promise that will resolve once the underlying operation is completed. - */ - async pollUntilDone(pollOptions = {}) { - if (this.stopped) { - this.startPolling(pollOptions).catch(this.reject); - } - this.processUpdatedState(); - return this.promise; - } - /** - * Invokes the provided callback after each polling is completed, - * sending the current state of the poller's operation. + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. 
Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. * - * It returns a method that can be used to stop receiving updates on the given callback function. - */ - onProgress(callback) { - this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); - }; - } - /** - * Returns true if the poller has finished polling. + * @param request - */ - isDone() { - const state = this.operation.state; - return Boolean(state.isCompleted || state.isCancelled || state.error); + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index2, array) => { + if (index2 > 0 && value.name.toLowerCase() === array[index2 - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; } /** - * Stops the poller from continuing to poll. + * Retrieves the webResource canonicalized resource string. + * + * @param request - */ - stopPolling() { - if (!this.stopped) { - this.stopped = true; - if (this.reject) { - this.reject(new PollerStoppedError("This poller is already stopped")); + getCanonicalizedResourceString(request) { + const path6 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path6}`; + const queries = (0, utils_common_js_1.getURLQueries)(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } + return canonicalizedResourceString; } + }; + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js +var require_Credential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { /** - * Returns true if the poller is stopped. + * Creates a RequestPolicy object. 
+ * + * @param _nextPolicy - + * @param _options - */ - isStopped() { - return this.stopped; + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); } + }; + exports2.Credential = Credential; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { /** - * Attempts to cancel the underlying operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. - * - * If it's called again before it finishes, it will throw an error. - * - * @param options - Optional properties passed to the operation's update method. + * Azure Storage account name; readonly. */ - cancelOperation(options = {}) { - if (!this.cancelPromise) { - this.cancelPromise = this.cancelOnce(options); - } else if (options.abortSignal) { - throw new Error("A cancel request is currently pending"); - } - return this.cancelPromise; - } + accountName; /** - * Returns the state of the operation. - * - * Even though TState will be the same type inside any of the methods of any extension of the Poller class, - * implementations of the pollers can customize what's shared with the public by writing their own - * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller - * and a public type representing a safe to share subset of the properties of the internal state. - * Their definition of getOperationState can then return their public type. - * - * Example: - * - * ```ts - * // Let's say we have our poller's operation state defined as: - * interface MyOperationState extends PollOperationState { - * privateProperty?: string; - * publicProperty?: string; - * } - * - * // To allow us to have a true separation of public and private state, we have to define another interface: - * interface PublicState extends PollOperationState { - * publicProperty?: string; - * } - * - * // Then, we define our Poller as follows: - * export class MyPoller extends Poller { - * // ... More content is needed here ... - * - * public getOperationState(): PublicState { - * const state: PublicState = this.operation.state; - * return { - * // Properties from PollOperationState - * isStarted: state.isStarted, - * isCompleted: state.isCompleted, - * isCancelled: state.isCancelled, - * error: state.error, - * result: state.result, - * - * // The only other property needed by PublicState. - * publicProperty: state.publicProperty - * } - * } - * } - * ``` - * - * You can see this in the tests of this repository, go to the file: - * `../test/utils/testPoller.ts` - * and look for the getOperationState implementation. + * Azure Storage account key; readonly. */ - getOperationState() { - return this.operation.state; + accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. 
+ * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); } /** - * Returns the result value of the operation, - * regardless of the state of the poller. - * It can return undefined or an incomplete form of the final TResult value - * depending on the implementation. + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - */ - getResult() { - const state = this.operation.state; - return state.result; + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } /** - * Returns a serialized version of the poller's operation - * by invoking the operation's toString method. + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - */ - toString() { - return this.operation.toString(); + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; - exports2.Poller = Poller; + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js -var require_lroEngine = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var operation_js_1 = require_operation3(); - var constants_js_1 = require_constants9(); - var poller_js_1 = require_poller3(); - var operation_js_2 = require_operation(); - var LroEngine = class extends poller_js_1.Poller { - constructor(lro, options) { - const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; - const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; - const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); - super(operation); - this.resolveOnUnsuccessful = resolveOnUnsuccessful; - this.config = { intervalInMs }; - operation.setPollerConfig(this.config); - } + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { /** - * The method used by the poller to wait before attempting to update its operation. + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - */ - delay() { - return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } }; - exports2.LroEngine = LroEngine; + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js -var require_lroEngine2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var lroEngine_js_1 = require_lroEngine(); - Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { - return lroEngine_js_1.LroEngine; - } }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var AnonymousCredential = class extends Credential_js_1.Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + } + }; + exports2.AnonymousCredential = AnonymousCredential; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js -var require_pollOperation = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { +// ../../node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js +var require_BuffersStream = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BuffersStream = void 0; + var node_stream_1 = require("node:stream"); + var BuffersStream = class extends node_stream_1.Readable { + buffers; + byteLength; + /** + * The offset of data to be read in the current buffer. + */ + byteOffsetInCurrentBuffer; + /** + * The index of buffer to be read in the array of buffers. + */ + bufferIndex; + /** + * The total length of data already read. + */ + pushedBytesLength; + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. + * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. 
The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + const end2 = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end2)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end2; + i = size; + break; + } else { + const end2 = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end2)); + if (remaining === remainingCapacityInThisBuffer) { + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } else { + this.byteOffsetInCurrentBuffer = end2; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } + }; + exports2.BuffersStream = BuffersStream; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/index.js -var require_commonjs11 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { +// ../../node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js +var require_PooledBuffer = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; + exports2.PooledBuffer = void 0; var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var poller_js_1 = require_poller2(); - Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { - return poller_js_1.createHttpPoller; - } }); - tslib_1.__exportStar(require_lroEngine2(), exports2); - tslib_1.__exportStar(require_poller3(), exports2); - tslib_1.__exportStar(require_pollOperation(), exports2); + var BuffersStream_js_1 = require_BuffersStream(); + var node_buffer_1 = tslib_1.__importDefault(require("node:buffer")); + var maxBufferLength = node_buffer_1.default.constants.MAX_LENGTH; + var PooledBuffer = class { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + buffers = []; + /** + * The total size of internal buffers. + */ + capacity; + /** + * The total size of data contained in internal buffers. + */ + _size; + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + constructor(capacity, buffers, totalLength) { + this.capacity = capacity; + this._size = 0; + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream_js_1.BuffersStream(this.buffers, this.size); + } + }; + exports2.PooledBuffer = PooledBuffer; } }); -// ../../node_modules/@azure/storage-blob/dist/index.js -var require_dist4 = __commonJS({ - "../../node_modules/@azure/storage-blob/dist/index.js"(exports2) { +// ../../node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js +var require_BufferScheduler = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var coreRestPipeline = require_commonjs6(); - var tslib = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreAuth = require_commonjs7(); - var coreUtil = require_commonjs4(); - var coreHttpCompat = require_commonjs9(); - var coreClient = require_commonjs8(); - var coreXml = require_commonjs10(); - var logger$1 = require_commonjs2(); - var abortController = require_commonjs3(); - var crypto2 = require("crypto"); - var coreTracing = require_commonjs5(); - var stream2 = require("stream"); - var coreLro = require_commonjs11(); - var events = require("events"); - var fs5 = require("fs"); - var util = require("util"); - var buffer = require("buffer"); - function _interopNamespaceDefault(e) { - var n = /* @__PURE__ */ Object.create(null); - if (e) { - Object.keys(e).forEach(function(k) { - if (k !== "default") { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? 
d : { - enumerable: true, - get: function() { - return e[k]; - } - }); - } - }); - } - n.default = e; - return Object.freeze(n); - } - var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); - var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs5); - var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); - var logger = logger$1.createClientLogger("storage-blob"); - var BaseRequestPolicy = class { + exports2.BufferScheduler = void 0; + var events_1 = require("events"); + var PooledBuffer_js_1 = require_PooledBuffer(); + var BufferScheduler = class { /** - * The main method to implement that manipulates a request/response. + * Size of buffers in incoming and outgoing queues. This class will try to align + * data read from Readable stream into buffer chunks with bufferSize defined. */ - constructor(_nextPolicy, _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } + bufferSize; /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. + * How many buffers can be created or maintained. */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } + maxBuffers; /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. + * A Node.js Readable stream. */ - log(logLevel, message) { - this._options.log(logLevel, message); - } - }; - var SDK_VERSION = "12.27.0"; - var SERVICE_VERSION = "2025-05-05"; - var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; - var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; - var BLOCK_BLOB_MAX_BLOCKS = 5e4; - var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; - var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; - var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; - var REQUEST_TIMEOUT = 100 * 1e3; - var StorageOAuthScopes = "https://storage.azure.com/.default"; - var URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout" + readable; + /** + * OutgoingHandler is an async function triggered by BufferScheduler when there + * are available buffers in outgoing array. + */ + outgoingHandler; + /** + * An internal event emitter. + */ + emitter = new events_1.EventEmitter(); + /** + * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers) + */ + concurrency; + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + offset = 0; + /** + * An internal marker to track whether stream is end. + */ + isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + isError = false; + /** + * How many handlers are executing. + */ + executingOutgoingHandlers = 0; + /** + * Encoding of the input Readable stream which has string data type instead of Buffer. + */ + encoding; + /** + * How many buffers have been allocated. + */ + numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + outgoing = []; + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; } - }; - var HTTPURLConnection = { - HTTP_ACCEPTED: 202 - }; - var HeaderConstants = { - AUTHORIZATION: "Authorization", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" - }; - var ETagNone = ""; - var ETagAny = "*"; - var SIZE_1_MB = 1 * 1024 * 1024; - var BATCH_MAX_REQUEST = 256; - var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; - var HTTP_LINE_ENDING = "\r\n"; - var HTTP_VERSION_1_1 = "HTTP/1.1"; - var EncryptionAlgorithmAES25 = "AES256"; - var DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; - var StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - 
"x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-copy-source-error-code", - "x-ms-copy-source-status-code", - "x-ms-if-tags", - "x-ms-source-if-tags" - ]; - var StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot" - ]; - var BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; - var BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; - var PathStylePorts = [ + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data2) => { + data2 = typeof data2 === "string" ? 
Buffer.from(data2, this.encoding) : data2; + this.appendUnresolvedData(data2); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); + } else if (this.unresolvedLength >= this.bufferSize) { + return; + } else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data2) { + this.unresolvedDataArray.push(data2); + this.unresolvedLength += data2.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer_js_1.PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } else { + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } + }; + exports2.BufferScheduler = BufferScheduler; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/cache.js +var require_cache2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/cache.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; + var core_rest_pipeline_1 = require_commonjs6(); + var _defaultHttpClient; + function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return _defaultHttpClient; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; + /** + * The main method to implement that manipulates a request/response. + */ + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
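/*
 * Editorial sketch (not part of the bundled output): rough usage of the
 * BufferScheduler defined above, assuming the class is in scope. It pulls a
 * Node.js Readable into bufferSize-aligned pooled buffers and calls
 * outgoingHandler(bodyFactory, length, offset) for each filled buffer, keeping
 * at most `concurrency` handlers in flight; do() resolves once the stream ends
 * and all handlers have completed. The chunk size, buffer count, concurrency
 * and uploadBlock function here are illustrative assumptions, not values from this diff.
 */
async function uploadInBlocks(sourceStream, uploadBlock) {
  const scheduler = new BufferScheduler(
    sourceStream,                 // any Node.js Readable
    8 * 1024 * 1024,              // bufferSize: fill 8 MiB buffers (assumed value)
    4,                            // maxBuffers: allocate at most 4 pooled buffers
    (bodyFactory, length, offset) => uploadBlock(bodyFactory(), length, offset),
    2                             // concurrency: at most 2 outgoing handlers at a time
  );
  await scheduler.do();
}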
+ */ + log(logLevel, message) { + this._options.log(logLevel, message); + } + }; + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/constants.js +var require_constants10 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PathStylePorts = exports2.DevelopmentConnectionString = exports2.HeaderConstants = exports2.URLConstants = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "1.0.0"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" + } + }; + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.PathStylePorts = [ "10000", "10001", "10002", @@ -34716,8 +35607,46 @@ var require_dist4 = __commonJS({ "11103", "11104" ]; - function escapeURLPath(url2) { - const urlParsed = new URL(url2); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js +var require_utils_common2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay; + exports2.padStart = padStart; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = 
httpAuthorizationToString; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + function escapeURLPath(url) { + const urlParsed = new URL(url); let path6 = urlParsed.pathname; path6 = path6 || "/"; path6 = escape2(path6); @@ -34749,7 +35678,7 @@ var require_dist4 = __commonJS({ let proxyUri = ""; if (connectionString.startsWith("UseDevelopmentStorage=true")) { proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; + connectionString = constants_js_1.DevelopmentConnectionString; } let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; @@ -34804,15 +35733,15 @@ var require_dist4 = __commonJS({ function escape2(text3) { return encodeURIComponent(text3).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); } - function appendToURLPath(url2, name) { - const urlParsed = new URL(url2); + function appendToURLPath(url, name) { + const urlParsed = new URL(url); let path6 = urlParsed.pathname; path6 = path6 ? path6.endsWith("/") ? `${path6}${name}` : `${path6}/${name}` : name; urlParsed.pathname = path6; return urlParsed.toString(); } - function setURLParameter(url2, name, value) { - const urlParsed = new URL(url2); + function setURLParameter(url, name, value) { + const urlParsed = new URL(url); const encodedName = encodeURIComponent(name); const encodedValue = value ? encodeURIComponent(value) : void 0; const searchString = urlParsed.search === "" ? "?" : urlParsed.search; @@ -34831,34 +35760,33 @@ var require_dist4 = __commonJS({ urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return urlParsed.toString(); } - function getURLParameter(url2, name) { - var _a4; - const urlParsed = new URL(url2); - return (_a4 = urlParsed.searchParams.get(name)) !== null && _a4 !== void 0 ? _a4 : void 0; + function getURLParameter(url, name) { + const urlParsed = new URL(url); + return urlParsed.searchParams.get(name) ?? void 0; } - function setURLHost(url2, host) { - const urlParsed = new URL(url2); + function setURLHost(url, host) { + const urlParsed = new URL(url); urlParsed.hostname = host; return urlParsed.toString(); } - function getURLPath(url2) { + function getURLPath(url) { try { - const urlParsed = new URL(url2); + const urlParsed = new URL(url); return urlParsed.pathname; } catch (e) { return void 0; } } - function getURLScheme(url2) { + function getURLScheme(url) { try { - const urlParsed = new URL(url2); + const urlParsed = new URL(url); return urlParsed.protocol.endsWith(":") ? 
urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } catch (e) { return void 0; } } - function getURLPathAndQuery(url2) { - const urlParsed = new URL(url2); + function getURLPathAndQuery(url) { + const urlParsed = new URL(url); const pathString = urlParsed.pathname; if (!pathString) { throw new RangeError("Invalid url without valid path."); @@ -34870,8 +35798,8 @@ var require_dist4 = __commonJS({ } return `${pathString}${queryString}`; } - function getURLQueries(url2) { - let queryString = new URL(url2).search; + function getURLQueries(url) { + let queryString = new URL(url).search; if (!queryString) { return {}; } @@ -34892,8 +35820,8 @@ var require_dist4 = __commonJS({ } return queries; } - function appendToURLQuery(url2, queryParts) { - const urlParsed = new URL(url2); + function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); let query = urlParsed.search; if (query) { query += "&" + queryParts; @@ -34908,7 +35836,10 @@ var require_dist4 = __commonJS({ return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; } function base64encode(content) { - return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); } function generateBlockID(blockIDPrefix, blockIndex) { const maxSourceStringLength = 48; @@ -34956,11 +35887,31 @@ var require_dist4 = __commonJS({ return padString.slice(0, targetLength) + currentString; } } + function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); + } + } + return headers; + } function iEqual(str1, str2) { return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } - function getAccountNameFromUrl(url2) { - const parsedUrl = new URL(url2); + function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); let accountName; try { if (parsedUrl.hostname.split(".")[1] === "blob") { @@ -34977,197 +35928,15 @@ var require_dist4 = __commonJS({ } function isIpEndpointStyle(parsedUrl) { const host = parsedUrl.host; - return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port); - } - function toBlobTagsString(tags2) { - if (tags2 === void 0) { - return void 0; - } - const tagPairs = []; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); - } - } - return tagPairs.join("&"); - } - function toBlobTags(tags2) { - if (tags2 === void 0) { - return void 0; - } - 
const res = { - blobTagSet: [] - }; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - res.blobTagSet.push({ - key, - value - }); - } - } - return res; - } - function toTags(tags2) { - if (tags2 === void 0) { - return void 0; - } - const res = {}; - for (const blobTag of tags2.blobTagSet) { - res[blobTag.key] = blobTag.value; - } - return res; - } - function toQuerySerialization(textConfiguration) { - if (textConfiguration === void 0) { - return void 0; - } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema - } - } - }; - case "parquet": - return { - format: { - type: "parquet" - } - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); - } + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); } - function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return void 0; - } - if ("policy-id" in objectReplicationRecord) { - return void 0; - } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); - } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); - } else { - orProperties.push({ - policyId: ids[0], - rules: [rule] - }); - } - } - return orProperties; + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; } function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; } - function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); - } else { - return name.content; - } - } - function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a4; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a4 = internalResponse.segment.blobPrefixes) === null || _a4 === void 0 ? 
void 0 : _a4.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - ++pageRangeIndex; - } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - ++clearRangeIndex; - } - } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - } - } function EscapePath(blobName) { const split = blobName.split("/"); for (let i = 0; i < split.length; i++) { @@ -35181,178 +35950,84 @@ var require_dist4 = __commonJS({ } throw new TypeError(`Unexpected response object ${response}`); } - exports2.StorageRetryPolicyType = void 0; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(exports2.StorageRetryPolicyType || (exports2.StorageRetryPolicyType = {})); - var DEFAULT_RETRY_OPTIONS$1 = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: exports2.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); - var StorageRetryPolicy = class extends BaseRequestPolicy { + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * Creates an instance of RetryPolicy. - * + * Creates an instance of StorageBrowserPolicy. * @param nextPolicy - * @param options - - * @param retryOptions - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + // The base class has a protected constructor. 
Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { super(nextPolicy, options); - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS$1.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS$1.secondaryHost - }; } /** - * Sends request. + * Sends out request. * * @param request - */ async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); - } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. - */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } - } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); - } - /** - * Decide whether to retry according to last HTTP response and retry counters. 
- * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); - return false; - } - const retriableErrors2 = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors2) { - if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } - } - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } - } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; + if (core_util_1.isNodeLike) { + return this._nextPolicy.sendRequest(request); } - return false; - } - /** - * Delay a calculated time between retries. - * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports2.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports2.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } else { - delayTimeInMs = Math.random() * 1e3; + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); + request.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } }; - var StorageRetryPolicyFactory = class { - /** - * Creates an instance of StorageRetryPolicyFactory. 
- * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; - } + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); + var StorageBrowserPolicyFactory = class { /** - * Creates a StorageRetryPolicy object. + * Creates a StorageBrowserPolicyFactory object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } }; - var CredentialPolicy = class extends BaseRequestPolicy { + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** * Sends out request. * @@ -35371,6 +36046,83 @@ var require_dist4 = __commonJS({ return request; } }; + exports2.CredentialPolicy = CredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + }; + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js +var require_Credential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { + /** + * Creates a RequestPolicy object. 
+ * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } + }; + exports2.Credential = Credential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var AnonymousCredential = class extends Credential_js_1.Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + } + }; + exports2.AnonymousCredential = AnonymousCredential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.compareHeader = compareHeader; var table_lv0 = new Uint32Array([ 0, 0, @@ -35794,7 +36546,24 @@ var require_dist4 = __commonJS({ } return false; } - var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy { + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + factory; /** * Creates an instance of StorageSharedKeyCredentialPolicy. 
* @param nextPolicy - @@ -35811,26 +36580,26 @@ var require_dist4 = __commonJS({ * @param request - */ signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); return request; } /** @@ -35845,7 +36614,7 @@ var require_dist4 = __commonJS({ if (!value) { return ""; } - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; @@ -35865,10 +36634,10 @@ var require_dist4 = __commonJS({ */ getCanonicalizedHeadersString(request) { let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); }); headersArray.sort((a, b) => { - return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + return (0, 
SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index2, array) => { if (index2 > 0 && value.name.toLowerCase() === array[index2 - 1].name.toLowerCase()) { @@ -35889,10 +36658,10 @@ var require_dist4 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path6 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${this.factory.accountName}${path6}`; - const queries = getURLQueries(request.url); + const queries = (0, utils_common_js_1.getURLQueries)(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; @@ -35912,18 +36681,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedResourceString; } }; - var Credential = class { + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { /** - * Creates a RequestPolicy object. - * - * @param _nextPolicy - - * @param _options - + * Azure Storage account name; readonly. */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); - } - }; - var StorageSharedKeyCredential = class extends Credential { + accountName; + /** + * Azure Storage account key; readonly. + */ + accountKey; /** * Creates an instance of StorageSharedKeyCredential. * @param accountName - @@ -35941,7 +36720,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } /** * Generates a hash signature for an HTTP request or for a SAS. 
@@ -35949,67 +36728,327 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto2.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; - var AnonymousCredentialPolicy = class extends CredentialPolicy { + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/log.js +var require_log6 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("storage-common"); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs3(); + var RequestPolicy_js_1 = require_RequestPolicy2(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * Creates an instance of AnonymousCredentialPolicy. + * RetryOptions. + */ + retryOptions; + /** + * Creates an instance of RetryPolicy. + * * @param nextPolicy - * @param options - + * @param retryOptions - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); + } + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + } + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; + } + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - var AnonymousCredential = class extends Credential { + exports2.StorageRetryPolicy = StorageRetryPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy2(); + Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; /** - * Creates an {@link AnonymousCredentialPolicy} object. 
+ * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); } }; - var _defaultHttpClient; - function getCachedDefaultHttpClient() { - if (!_defaultHttpClient) { - _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); - } - return _defaultHttpClient; - } - var storageBrowserPolicyName = "storageBrowserPolicy"; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; function storageBrowserPolicy() { return { - name: storageBrowserPolicyName, + name: exports2.storageBrowserPolicyName, async sendRequest(request, next2) { - if (coreUtil.isNode) { + if (core_util_1.isNodeLike) { return next2(request); } if (request.method === "GET" || request.method === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - request.headers.delete(HeaderConstants.COOKIE); - request.headers.delete(HeaderConstants.CONTENT_LENGTH); + request.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); return next2(request); } }; } - var storageRetryPolicyName = "storageRetryPolicy"; - var StorageRetryPolicyType; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(StorageRetryPolicyType || (StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants10(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + function storageCorrectContentLengthPolicy() { + function correctContentLength(request) { + if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { + 
request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + } + return { + name: exports2.storageCorrectContentLengthPolicyName, + async sendRequest(request, next2) { + correctContentLength(request); + return next2(request); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var abort_controller_1 = require_commonjs3(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory2(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; var DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1e3, maxTries: 4, retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: void 0 // Use server side default timeout strategy @@ -36025,41 +37064,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; "EPIPE", "REQUEST_SEND_ERROR" ]; - var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); function storageRetryPolicy(options = {}) { - var _a4, _b, _c, _d, _e, _f; - const retryPolicyType = (_a4 = options.retryPolicyType) !== null && _a4 !== void 0 ? _a4 : DEFAULT_RETRY_OPTIONS.retryPolicyType; - const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; - const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; - const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; - const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; - const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? 
DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; function shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }) { - var _a5, _b2; if (attempt >= maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); return false; } if (error2) { for (const retriableError of retriableErrors) { if (error2.name.toUpperCase().includes(retriableError) || error2.message.toUpperCase().includes(retriableError) || error2.code && error2.code.toString().toUpperCase() === retriableError) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } } - if ((error2 === null || error2 === void 0 ? void 0 : error2.code) === "PARSE_ERROR" && (error2 === null || error2 === void 0 ? void 0 : error2.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + if (error2?.code === "PARSE_ERROR" && error2?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } } if (response || error2) { - const statusCode = (_b2 = (_a5 = response === null || response === void 0 ? void 0 : response.status) !== null && _a5 !== void 0 ? _a5 : error2 === null || error2 === void 0 ? void 0 : error2.statusCode) !== null && _b2 !== void 0 ? _b2 : 0; + const statusCode = response?.status ?? error2?.statusCode ?? 0; if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } @@ -36069,27 +37106,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; let delayTimeInMs = 0; if (isPrimaryRetry) { switch (retryPolicyType) { - case StorageRetryPolicyType.EXPONENTIAL: + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); break; - case StorageRetryPolicyType.FIXED: + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: delayTimeInMs = retryDelayInMs; break; } } else { delayTimeInMs = Math.random() * 1e3; } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delayTimeInMs; } return { - name: storageRetryPolicyName, + name: exports2.storageRetryPolicyName, async sendRequest(request, next2) { if (tryTimeoutInMs) { - request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); } const primaryUrl = request.url; - const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : void 0; + const secondaryUrl = secondaryHost ? 
(0, utils_common_js_1.setURLHost)(request.url, secondaryHost) : void 0; let secondaryHas404 = false; let attempt = 1; let retryAgain = true; @@ -36101,61 +37138,75 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; response = void 0; error2 = void 0; try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); response = await next2(request); secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; } catch (e) { - if (coreRestPipeline.isRestError(e)) { - logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); error2 = e; } else { - logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); throw e; } } retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }); if (retryAgain) { - await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); } attempt++; } if (response) { return response; } - throw error2 !== null && error2 !== void 0 ? error2 : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + throw error2 ?? new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); } }; } - var storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; function storageSharedKeyCredentialPolicy(options) { function signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), - getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), 
- getHeaderValueToSign(request, HeaderConstants.DATE), - getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - getHeaderValueToSign(request, HeaderConstants.RANGE) + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); - const signature = crypto2.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } function getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) { return ""; } - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; @@ -36163,12 +37214,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; function getCanonicalizedHeadersString(request) { let headersArray = []; for (const [name, value] of request.headers) { - if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { headersArray.push({ name, value }); } } headersArray.sort((a, b) => { - return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index2, array) => { if (index2 > 0 && value.name.toLowerCase() === array[index2 - 1].name.toLowerCase()) { @@ -36184,10 +37235,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path6 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${options.accountName}${path6}`; - const queries = getURLQueries(request.url); + const queries = (0, utils_common_js_1.getURLQueries)(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; @@ -36207,14 +37258,331 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return 
canonicalizedResourceString; } return { - name: storageSharedKeyCredentialPolicyName, + name: exports2.storageSharedKeyCredentialPolicyName, async sendRequest(request, next2) { signRequest(request); return next2(request); } }; } - var StorageBrowserPolicy = class extends BaseRequestPolicy { + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/index.js +var require_commonjs11 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = exports2.getCachedDefaultHttpClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_BufferScheduler(), exports2); + var cache_js_1 = require_cache2(); + Object.defineProperty(exports2, "getCachedDefaultHttpClient", { enumerable: true, get: function() { + return cache_js_1.getCachedDefaultHttpClient; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory(), exports2); + tslib_1.__exportStar(require_AnonymousCredential2(), exports2); + tslib_1.__exportStar(require_Credential2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy2(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_CredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicy(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageCorrectContentLengthPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyType2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicy2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; + function storageBrowserPolicy() { + return { + name: exports2.storageBrowserPolicyName, + async sendRequest(request, next2) { + if (core_util_1.isNodeLike) { + return next2(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return next2(request); + } + }; + } + } 
+}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var abort_controller_1 = require_commonjs3(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + ]; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + function storageRetryPolicy(options = {}) { + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }) { + if (attempt >= maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error2) { + for (const retriableError of retriableErrors) { + if (error2.name.toUpperCase().includes(retriableError) || error2.message.toUpperCase().includes(retriableError) || error2.code && error2.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if (error2?.code === "PARSE_ERROR" && error2?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + if (response || error2) { + const statusCode = response?.status ?? error2?.statusCode ?? 
0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: exports2.storageRetryPolicyName, + async sendRequest(request, next2) { + if (tryTimeoutInMs) { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request.url, secondaryHost) : void 0; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error2; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request.method) || attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = void 0; + error2 = void 0; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next2(request); + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (e) { + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error2 = e; + } else { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }); + if (retryAgain) { + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error2 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); + const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + } + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index2, array) => { + if (index2 > 0 && value.name.toLowerCase() === array[index2 - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + 
canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path6 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path6}`; + const queries = (0, utils_common_js_1.getURLQueries)(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: exports2.storageSharedKeyCredentialPolicyName, + async sendRequest(request, next2) { + signRequest(request); + return next2(request); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** * Creates an instance of StorageBrowserPolicy. 
* @param nextPolicy - @@ -36231,17 +37599,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param request - */ async sendRequest(request) { - if (coreUtil.isNode) { + if (core_util_1.isNodeLike) { return this._nextPolicy.sendRequest(request); } if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - request.headers.remove(HeaderConstants.COOKIE); - request.headers.remove(HeaderConstants.CONTENT_LENGTH); + request.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); return this._nextPolicy.sendRequest(request); } }; + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy2(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); var StorageBrowserPolicyFactory = class { /** * Creates a StorageBrowserPolicyFactory object. @@ -36250,24 +37632,68 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } }; - var storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants9(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; function storageCorrectContentLengthPolicy() { function correctContentLength(request) { if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } } return { - name: storageCorrectContentLengthPolicyName, + name: exports2.storageCorrectContentLengthPolicyName, async sendRequest(request, next2) { correctContentLength(request); return next2(request); } }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js +var 
require_Pipeline = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Pipeline = exports2.StorageOAuthScopes = void 0; + exports2.isPipelineLike = isPipelineLike; + exports2.newPipeline = newPipeline; + exports2.getCoreClientOptions = getCoreClientOptions; + exports2.getCredentialFromPipeline = getCredentialFromPipeline; + var core_http_compat_1 = require_commonjs9(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_client_1 = require_commonjs8(); + var core_xml_1 = require_commonjs10(); + var core_auth_1 = require_commonjs7(); + var log_js_1 = require_log5(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var constants_js_1 = require_constants9(); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return constants_js_1.StorageOAuthScopes; + } }); + var storage_common_1 = require_commonjs11(); + var StorageBrowserPolicyV2_js_1 = require_StorageBrowserPolicyV22(); + var StorageRetryPolicyV2_js_1 = require_StorageRetryPolicyV22(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); + var StorageBrowserPolicyFactory_js_1 = require_StorageBrowserPolicyFactory2(); + var StorageCorrectContentLengthPolicy_js_1 = require_StorageCorrectContentLengthPolicy2(); function isPipelineLike(pipeline) { if (!pipeline || typeof pipeline !== "object") { return false; @@ -36276,6 +37702,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; } var Pipeline = class { + /** + * A list of chained request policy factories. + */ + factories; + /** + * Configures pipeline logger and HTTP client. + */ + options; /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * @@ -36299,9 +37733,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; } }; + exports2.Pipeline = Pipeline; function newPipeline(credential, pipelineOptions = {}) { if (!credential) { - credential = new AnonymousCredential(); + credential = new AnonymousCredential_js_1.AnonymousCredential(); } const pipeline = new Pipeline([], pipelineOptions); pipeline._credential = credential; @@ -36324,7 +37759,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (novelFactories.length) { const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); return { - wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + wrappedPolicies: (0, core_http_compat_1.createRequestPolicyFactoryPolicy)(novelFactories), afterRetry: hasInjector }; } @@ -36332,75 +37767,85 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return void 0; } function getCoreClientOptions(pipeline) { - var _a4; - const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + const { httpClient: v1Client, ...restOptions } = pipeline.options; let httpClient = pipeline._coreHttpClient; if (!httpClient) { - httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + httpClient = v1Client ? 
(0, core_http_compat_1.convertHttpClient)(v1Client) : (0, storage_common_1.getCachedDefaultHttpClient)(); pipeline._coreHttpClient = httpClient; } let corePipeline = pipeline._corePipeline; if (!corePipeline) { - const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const packageDetails = `azsdk-js-azure-storage-blob/${constants_js_1.SDK_VERSION}`; const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { - additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - logger: logger.info - }, userAgentOptions: { - userAgentPrefix - }, serializationOptions: { - stringifyXML: coreXml.stringifyXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" + corePipeline = (0, core_client_1.createClientPipeline)({ + ...restOptions, + loggingOptions: { + additionalAllowedHeaderNames: constants_js_1.StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: constants_js_1.StorageBlobLoggingAllowedQueryParameters, + logger: log_js_1.logger.info + }, + userAgentOptions: { + userAgentPrefix + }, + serializationOptions: { + stringifyXML: core_xml_1.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } - } - }, deserializationOptions: { - parseXML: coreXml.parseXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" + }, + deserializationOptions: { + parseXML: core_xml_1.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } } - } })); + }); corePipeline.removePolicy({ phase: "Retry" }); - corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); - corePipeline.addPolicy(storageCorrectContentLengthPolicy()); - corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); - corePipeline.addPolicy(storageBrowserPolicy()); + corePipeline.removePolicy({ name: core_rest_pipeline_1.decompressResponsePolicyName }); + corePipeline.addPolicy((0, StorageCorrectContentLengthPolicy_js_1.storageCorrectContentLengthPolicy)()); + corePipeline.addPolicy((0, StorageRetryPolicyV2_js_1.storageRetryPolicy)(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy((0, StorageBrowserPolicyV2_js_1.storageBrowserPolicy)()); const downlevelResults = processDownlevelPipeline(pipeline); if (downlevelResults) { corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : void 0); } const credential = getCredentialFromPipeline(pipeline); - if (coreAuth.isTokenCredential(credential)) { - corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ credential, - scopes: (_a4 = restOptions.audience) !== null && _a4 !== void 0 ? 
_a4 : StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } + scopes: restOptions.audience ?? constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential) { - corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); } pipeline._corePipeline = corePipeline; } - return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); + return { + ...restOptions, + allowInsecureConnection: true, + httpClient, + pipeline: corePipeline + }; } function getCredentialFromPipeline(pipeline) { if (pipeline._credential) { return pipeline._credential; } - let credential = new AnonymousCredential(); + let credential = new AnonymousCredential_js_1.AnonymousCredential(); for (const factory of pipeline.factories) { - if (coreAuth.isTokenCredential(factory.credential)) { + if ((0, core_auth_1.isTokenCredential)(factory.credential)) { credential = factory.credential; } else if (isStorageSharedKeyCredential(factory)) { return factory; @@ -36409,28 +37854,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return credential; } function isStorageSharedKeyCredential(factory) { - if (factory instanceof StorageSharedKeyCredential) { + if (factory instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { return true; } return factory.constructor.name === "StorageSharedKeyCredential"; } function isAnonymousCredential(factory) { - if (factory instanceof AnonymousCredential) { + if (factory instanceof AnonymousCredential_js_1.AnonymousCredential) { return true; } return factory.constructor.name === "AnonymousCredential"; } function isCoreHttpBearerTokenFactory(factory) { - return coreAuth.isTokenCredential(factory.credential); + return (0, core_auth_1.isTokenCredential)(factory.credential); } function isStorageBrowserPolicyFactory(factory) { - if (factory instanceof StorageBrowserPolicyFactory) { + if (factory instanceof StorageBrowserPolicyFactory_js_1.StorageBrowserPolicyFactory) { return true; } return factory.constructor.name === "StorageBrowserPolicyFactory"; } function isStorageRetryPolicyFactory(factory) { - if (factory instanceof StorageRetryPolicyFactory) { + if (factory instanceof StorageRetryPolicyFactory_js_1.StorageRetryPolicyFactory) { return true; } return factory.constructor.name === "StorageRetryPolicyFactory"; @@ -36473,7 +37918,160 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return policyName.startsWith(knownPolicyName); }); } - var BlobServiceProperties = { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js +var require_models = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownStorageErrorCode = exports2.KnownBlobExpiryOptions = exports2.KnownFileShareTokenIntent = exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) 
{ + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + var KnownFileShareTokenIntent; + (function(KnownFileShareTokenIntent2) { + KnownFileShareTokenIntent2["Backup"] = "backup"; + })(KnownFileShareTokenIntent || (exports2.KnownFileShareTokenIntent = KnownFileShareTokenIntent = {})); + var KnownBlobExpiryOptions; + (function(KnownBlobExpiryOptions2) { + KnownBlobExpiryOptions2["NeverExpire"] = "NeverExpire"; + KnownBlobExpiryOptions2["RelativeToCreation"] = "RelativeToCreation"; + KnownBlobExpiryOptions2["RelativeToNow"] = "RelativeToNow"; + KnownBlobExpiryOptions2["Absolute"] = "Absolute"; + })(KnownBlobExpiryOptions || (exports2.KnownBlobExpiryOptions = KnownBlobExpiryOptions = {})); + var KnownStorageErrorCode; + (function(KnownStorageErrorCode2) { + KnownStorageErrorCode2["AccountAlreadyExists"] = "AccountAlreadyExists"; + KnownStorageErrorCode2["AccountBeingCreated"] = "AccountBeingCreated"; + KnownStorageErrorCode2["AccountIsDisabled"] = "AccountIsDisabled"; + KnownStorageErrorCode2["AuthenticationFailed"] = "AuthenticationFailed"; + KnownStorageErrorCode2["AuthorizationFailure"] = "AuthorizationFailure"; + KnownStorageErrorCode2["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported"; + KnownStorageErrorCode2["ConditionNotMet"] = "ConditionNotMet"; + KnownStorageErrorCode2["EmptyMetadataKey"] = "EmptyMetadataKey"; + KnownStorageErrorCode2["InsufficientAccountPermissions"] = "InsufficientAccountPermissions"; + KnownStorageErrorCode2["InternalError"] = "InternalError"; + KnownStorageErrorCode2["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo"; + KnownStorageErrorCode2["InvalidHeaderValue"] = "InvalidHeaderValue"; + KnownStorageErrorCode2["InvalidHttpVerb"] = "InvalidHttpVerb"; + KnownStorageErrorCode2["InvalidInput"] = "InvalidInput"; + KnownStorageErrorCode2["InvalidMd5"] = "InvalidMd5"; + KnownStorageErrorCode2["InvalidMetadata"] = "InvalidMetadata"; + KnownStorageErrorCode2["InvalidQueryParameterValue"] = "InvalidQueryParameterValue"; + KnownStorageErrorCode2["InvalidRange"] = "InvalidRange"; + KnownStorageErrorCode2["InvalidResourceName"] = "InvalidResourceName"; + KnownStorageErrorCode2["InvalidUri"] = "InvalidUri"; + KnownStorageErrorCode2["InvalidXmlDocument"] = "InvalidXmlDocument"; + KnownStorageErrorCode2["InvalidXmlNodeValue"] = "InvalidXmlNodeValue"; + KnownStorageErrorCode2["Md5Mismatch"] = "Md5Mismatch"; + KnownStorageErrorCode2["MetadataTooLarge"] = "MetadataTooLarge"; + KnownStorageErrorCode2["MissingContentLengthHeader"] = "MissingContentLengthHeader"; + KnownStorageErrorCode2["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter"; + KnownStorageErrorCode2["MissingRequiredHeader"] = "MissingRequiredHeader"; + KnownStorageErrorCode2["MissingRequiredXmlNode"] = "MissingRequiredXmlNode"; + KnownStorageErrorCode2["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported"; + KnownStorageErrorCode2["OperationTimedOut"] = "OperationTimedOut"; + KnownStorageErrorCode2["OutOfRangeInput"] = "OutOfRangeInput"; + KnownStorageErrorCode2["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue"; + KnownStorageErrorCode2["RequestBodyTooLarge"] = "RequestBodyTooLarge"; + KnownStorageErrorCode2["ResourceTypeMismatch"] = "ResourceTypeMismatch"; + KnownStorageErrorCode2["RequestUrlFailedToParse"] = "RequestUrlFailedToParse"; + KnownStorageErrorCode2["ResourceAlreadyExists"] = "ResourceAlreadyExists"; + 
KnownStorageErrorCode2["ResourceNotFound"] = "ResourceNotFound"; + KnownStorageErrorCode2["ServerBusy"] = "ServerBusy"; + KnownStorageErrorCode2["UnsupportedHeader"] = "UnsupportedHeader"; + KnownStorageErrorCode2["UnsupportedXmlNode"] = "UnsupportedXmlNode"; + KnownStorageErrorCode2["UnsupportedQueryParameter"] = "UnsupportedQueryParameter"; + KnownStorageErrorCode2["UnsupportedHttpVerb"] = "UnsupportedHttpVerb"; + KnownStorageErrorCode2["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet"; + KnownStorageErrorCode2["BlobAlreadyExists"] = "BlobAlreadyExists"; + KnownStorageErrorCode2["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy"; + KnownStorageErrorCode2["BlobNotFound"] = "BlobNotFound"; + KnownStorageErrorCode2["BlobOverwritten"] = "BlobOverwritten"; + KnownStorageErrorCode2["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength"; + KnownStorageErrorCode2["BlobUsesCustomerSpecifiedEncryption"] = "BlobUsesCustomerSpecifiedEncryption"; + KnownStorageErrorCode2["BlockCountExceedsLimit"] = "BlockCountExceedsLimit"; + KnownStorageErrorCode2["BlockListTooLong"] = "BlockListTooLong"; + KnownStorageErrorCode2["CannotChangeToLowerTier"] = "CannotChangeToLowerTier"; + KnownStorageErrorCode2["CannotVerifyCopySource"] = "CannotVerifyCopySource"; + KnownStorageErrorCode2["ContainerAlreadyExists"] = "ContainerAlreadyExists"; + KnownStorageErrorCode2["ContainerBeingDeleted"] = "ContainerBeingDeleted"; + KnownStorageErrorCode2["ContainerDisabled"] = "ContainerDisabled"; + KnownStorageErrorCode2["ContainerNotFound"] = "ContainerNotFound"; + KnownStorageErrorCode2["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit"; + KnownStorageErrorCode2["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported"; + KnownStorageErrorCode2["CopyIdMismatch"] = "CopyIdMismatch"; + KnownStorageErrorCode2["FeatureVersionMismatch"] = "FeatureVersionMismatch"; + KnownStorageErrorCode2["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch"; + KnownStorageErrorCode2["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed"; + KnownStorageErrorCode2["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot"; + KnownStorageErrorCode2["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired"; + KnownStorageErrorCode2["InvalidBlobOrBlock"] = "InvalidBlobOrBlock"; + KnownStorageErrorCode2["InvalidBlobTier"] = "InvalidBlobTier"; + KnownStorageErrorCode2["InvalidBlobType"] = "InvalidBlobType"; + KnownStorageErrorCode2["InvalidBlockId"] = "InvalidBlockId"; + KnownStorageErrorCode2["InvalidBlockList"] = "InvalidBlockList"; + KnownStorageErrorCode2["InvalidOperation"] = "InvalidOperation"; + KnownStorageErrorCode2["InvalidPageRange"] = "InvalidPageRange"; + KnownStorageErrorCode2["InvalidSourceBlobType"] = "InvalidSourceBlobType"; + KnownStorageErrorCode2["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl"; + KnownStorageErrorCode2["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation"; + KnownStorageErrorCode2["LeaseAlreadyPresent"] = "LeaseAlreadyPresent"; + KnownStorageErrorCode2["LeaseAlreadyBroken"] = "LeaseAlreadyBroken"; + KnownStorageErrorCode2["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation"; + 
KnownStorageErrorCode2["LeaseIdMissing"] = "LeaseIdMissing"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged"; + KnownStorageErrorCode2["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed"; + KnownStorageErrorCode2["LeaseLost"] = "LeaseLost"; + KnownStorageErrorCode2["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation"; + KnownStorageErrorCode2["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet"; + KnownStorageErrorCode2["NoAuthenticationInformation"] = "NoAuthenticationInformation"; + KnownStorageErrorCode2["NoPendingCopyOperation"] = "NoPendingCopyOperation"; + KnownStorageErrorCode2["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob"; + KnownStorageErrorCode2["PendingCopyOperation"] = "PendingCopyOperation"; + KnownStorageErrorCode2["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer"; + KnownStorageErrorCode2["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound"; + KnownStorageErrorCode2["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported"; + KnownStorageErrorCode2["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet"; + KnownStorageErrorCode2["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge"; + KnownStorageErrorCode2["SnapshotCountExceeded"] = "SnapshotCountExceeded"; + KnownStorageErrorCode2["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded"; + KnownStorageErrorCode2["SnapshotsPresent"] = "SnapshotsPresent"; + KnownStorageErrorCode2["SourceConditionNotMet"] = "SourceConditionNotMet"; + KnownStorageErrorCode2["SystemInUse"] = "SystemInUse"; + KnownStorageErrorCode2["TargetConditionNotMet"] = "TargetConditionNotMet"; + KnownStorageErrorCode2["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite"; + KnownStorageErrorCode2["BlobBeingRehydrated"] = "BlobBeingRehydrated"; + KnownStorageErrorCode2["BlobArchived"] = "BlobArchived"; + KnownStorageErrorCode2["BlobNotArchived"] = "BlobNotArchived"; + KnownStorageErrorCode2["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch"; + KnownStorageErrorCode2["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch"; + KnownStorageErrorCode2["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch"; + KnownStorageErrorCode2["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch"; + KnownStorageErrorCode2["AuthorizationResourceTypeMismatch"] = "AuthorizationResourceTypeMismatch"; + KnownStorageErrorCode2["BlobAccessTierNotSupportedForAccountType"] = "BlobAccessTierNotSupportedForAccountType"; + })(KnownStorageErrorCode || (exports2.KnownStorageErrorCode = KnownStorageErrorCode = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js +var require_mappers = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceGetUserDelegationKeyHeaders = exports2.ServiceListContainersSegmentExceptionHeaders = exports2.ServiceListContainersSegmentHeaders = 
exports2.ServiceGetStatisticsExceptionHeaders = exports2.ServiceGetStatisticsHeaders = exports2.ServiceGetPropertiesExceptionHeaders = exports2.ServiceGetPropertiesHeaders = exports2.ServiceSetPropertiesExceptionHeaders = exports2.ServiceSetPropertiesHeaders = exports2.ArrowField = exports2.ArrowConfiguration = exports2.JsonTextConfiguration = exports2.DelimitedTextConfiguration = exports2.QueryFormat = exports2.QuerySerialization = exports2.QueryRequest = exports2.ClearRange = exports2.PageRange = exports2.PageList = exports2.Block = exports2.BlockList = exports2.BlockLookupList = exports2.BlobPrefix = exports2.BlobHierarchyListSegment = exports2.ListBlobsHierarchySegmentResponse = exports2.BlobPropertiesInternal = exports2.BlobName = exports2.BlobItemInternal = exports2.BlobFlatListSegment = exports2.ListBlobsFlatSegmentResponse = exports2.AccessPolicy = exports2.SignedIdentifier = exports2.BlobTag = exports2.BlobTags = exports2.FilterBlobItem = exports2.FilterBlobSegment = exports2.UserDelegationKey = exports2.KeyInfo = exports2.ContainerProperties = exports2.ContainerItem = exports2.ListContainersSegmentResponse = exports2.GeoReplication = exports2.BlobServiceStatistics = exports2.StorageError = exports2.StaticWebsite = exports2.CorsRule = exports2.Metrics = exports2.RetentionPolicy = exports2.Logging = exports2.BlobServiceProperties = void 0; + exports2.BlobUndeleteHeaders = exports2.BlobDeleteExceptionHeaders = exports2.BlobDeleteHeaders = exports2.BlobGetPropertiesExceptionHeaders = exports2.BlobGetPropertiesHeaders = exports2.BlobDownloadExceptionHeaders = exports2.BlobDownloadHeaders = exports2.ContainerGetAccountInfoExceptionHeaders = exports2.ContainerGetAccountInfoHeaders = exports2.ContainerListBlobHierarchySegmentExceptionHeaders = exports2.ContainerListBlobHierarchySegmentHeaders = exports2.ContainerListBlobFlatSegmentExceptionHeaders = exports2.ContainerListBlobFlatSegmentHeaders = exports2.ContainerChangeLeaseExceptionHeaders = exports2.ContainerChangeLeaseHeaders = exports2.ContainerBreakLeaseExceptionHeaders = exports2.ContainerBreakLeaseHeaders = exports2.ContainerRenewLeaseExceptionHeaders = exports2.ContainerRenewLeaseHeaders = exports2.ContainerReleaseLeaseExceptionHeaders = exports2.ContainerReleaseLeaseHeaders = exports2.ContainerAcquireLeaseExceptionHeaders = exports2.ContainerAcquireLeaseHeaders = exports2.ContainerFilterBlobsExceptionHeaders = exports2.ContainerFilterBlobsHeaders = exports2.ContainerSubmitBatchExceptionHeaders = exports2.ContainerSubmitBatchHeaders = exports2.ContainerRenameExceptionHeaders = exports2.ContainerRenameHeaders = exports2.ContainerRestoreExceptionHeaders = exports2.ContainerRestoreHeaders = exports2.ContainerSetAccessPolicyExceptionHeaders = exports2.ContainerSetAccessPolicyHeaders = exports2.ContainerGetAccessPolicyExceptionHeaders = exports2.ContainerGetAccessPolicyHeaders = exports2.ContainerSetMetadataExceptionHeaders = exports2.ContainerSetMetadataHeaders = exports2.ContainerDeleteExceptionHeaders = exports2.ContainerDeleteHeaders = exports2.ContainerGetPropertiesExceptionHeaders = exports2.ContainerGetPropertiesHeaders = exports2.ContainerCreateExceptionHeaders = exports2.ContainerCreateHeaders = exports2.ServiceFilterBlobsExceptionHeaders = exports2.ServiceFilterBlobsHeaders = exports2.ServiceSubmitBatchExceptionHeaders = exports2.ServiceSubmitBatchHeaders = exports2.ServiceGetAccountInfoExceptionHeaders = exports2.ServiceGetAccountInfoHeaders = exports2.ServiceGetUserDelegationKeyExceptionHeaders = void 0; + 
exports2.PageBlobGetPageRangesHeaders = exports2.PageBlobUploadPagesFromURLExceptionHeaders = exports2.PageBlobUploadPagesFromURLHeaders = exports2.PageBlobClearPagesExceptionHeaders = exports2.PageBlobClearPagesHeaders = exports2.PageBlobUploadPagesExceptionHeaders = exports2.PageBlobUploadPagesHeaders = exports2.PageBlobCreateExceptionHeaders = exports2.PageBlobCreateHeaders = exports2.BlobSetTagsExceptionHeaders = exports2.BlobSetTagsHeaders = exports2.BlobGetTagsExceptionHeaders = exports2.BlobGetTagsHeaders = exports2.BlobQueryExceptionHeaders = exports2.BlobQueryHeaders = exports2.BlobGetAccountInfoExceptionHeaders = exports2.BlobGetAccountInfoHeaders = exports2.BlobSetTierExceptionHeaders = exports2.BlobSetTierHeaders = exports2.BlobAbortCopyFromURLExceptionHeaders = exports2.BlobAbortCopyFromURLHeaders = exports2.BlobCopyFromURLExceptionHeaders = exports2.BlobCopyFromURLHeaders = exports2.BlobStartCopyFromURLExceptionHeaders = exports2.BlobStartCopyFromURLHeaders = exports2.BlobCreateSnapshotExceptionHeaders = exports2.BlobCreateSnapshotHeaders = exports2.BlobBreakLeaseExceptionHeaders = exports2.BlobBreakLeaseHeaders = exports2.BlobChangeLeaseExceptionHeaders = exports2.BlobChangeLeaseHeaders = exports2.BlobRenewLeaseExceptionHeaders = exports2.BlobRenewLeaseHeaders = exports2.BlobReleaseLeaseExceptionHeaders = exports2.BlobReleaseLeaseHeaders = exports2.BlobAcquireLeaseExceptionHeaders = exports2.BlobAcquireLeaseHeaders = exports2.BlobSetMetadataExceptionHeaders = exports2.BlobSetMetadataHeaders = exports2.BlobSetLegalHoldExceptionHeaders = exports2.BlobSetLegalHoldHeaders = exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = exports2.BlobDeleteImmutabilityPolicyHeaders = exports2.BlobSetImmutabilityPolicyExceptionHeaders = exports2.BlobSetImmutabilityPolicyHeaders = exports2.BlobSetHttpHeadersExceptionHeaders = exports2.BlobSetHttpHeadersHeaders = exports2.BlobSetExpiryExceptionHeaders = exports2.BlobSetExpiryHeaders = exports2.BlobUndeleteExceptionHeaders = void 0; + exports2.BlockBlobGetBlockListExceptionHeaders = exports2.BlockBlobGetBlockListHeaders = exports2.BlockBlobCommitBlockListExceptionHeaders = exports2.BlockBlobCommitBlockListHeaders = exports2.BlockBlobStageBlockFromURLExceptionHeaders = exports2.BlockBlobStageBlockFromURLHeaders = exports2.BlockBlobStageBlockExceptionHeaders = exports2.BlockBlobStageBlockHeaders = exports2.BlockBlobPutBlobFromUrlExceptionHeaders = exports2.BlockBlobPutBlobFromUrlHeaders = exports2.BlockBlobUploadExceptionHeaders = exports2.BlockBlobUploadHeaders = exports2.AppendBlobSealExceptionHeaders = exports2.AppendBlobSealHeaders = exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = exports2.AppendBlobAppendBlockFromUrlHeaders = exports2.AppendBlobAppendBlockExceptionHeaders = exports2.AppendBlobAppendBlockHeaders = exports2.AppendBlobCreateExceptionHeaders = exports2.AppendBlobCreateHeaders = exports2.PageBlobCopyIncrementalExceptionHeaders = exports2.PageBlobCopyIncrementalHeaders = exports2.PageBlobUpdateSequenceNumberExceptionHeaders = exports2.PageBlobUpdateSequenceNumberHeaders = exports2.PageBlobResizeExceptionHeaders = exports2.PageBlobResizeHeaders = exports2.PageBlobGetPageRangesDiffExceptionHeaders = exports2.PageBlobGetPageRangesDiffHeaders = exports2.PageBlobGetPageRangesExceptionHeaders = void 0; + exports2.BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { @@ -36545,7 +38143,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Logging = { + 
exports2.Logging = { serializedName: "Logging", type: { name: "Composite", @@ -36594,7 +38192,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var RetentionPolicy = { + exports2.RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", @@ -36621,7 +38219,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Metrics = { + exports2.Metrics = { serializedName: "Metrics", type: { name: "Composite", @@ -36660,7 +38258,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var CorsRule = { + exports2.CorsRule = { serializedName: "CorsRule", type: { name: "Composite", @@ -36712,7 +38310,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var StaticWebsite = { + exports2.StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", @@ -36750,7 +38348,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var StorageError = { + exports2.StorageError = { serializedName: "StorageError", type: { name: "Composite", @@ -36780,7 +38378,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobServiceStatistics = { + exports2.BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { @@ -36798,7 +38396,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var GeoReplication = { + exports2.GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", @@ -36824,7 +38422,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListContainersSegmentResponse = { + exports2.ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { @@ -36887,7 +38485,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerItem = { + exports2.ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { @@ -36935,7 +38533,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerProperties = { + exports2.ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", @@ -37047,7 +38645,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var KeyInfo = { + exports2.KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", @@ -37072,7 +38670,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var UserDelegationKey = { + exports2.UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", @@ -37137,7 +38735,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var FilterBlobSegment = { + exports2.FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { @@ -37187,7 +38785,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var FilterBlobItem = { + exports2.FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { @@ -37221,7 +38819,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobTags = { + exports2.BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { @@ -37247,7 +38845,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobTag = { + exports2.BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { @@ -37273,7 +38871,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var SignedIdentifier = { + exports2.SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { @@ -37299,7 +38897,7 
@@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AccessPolicy = { + exports2.AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", @@ -37329,7 +38927,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListBlobsFlatSegmentResponse = { + exports2.ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: { @@ -37393,7 +38991,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobFlatListSegment = { + exports2.BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { @@ -37418,7 +39016,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobItemInternal = { + exports2.BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { @@ -37505,7 +39103,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobName = { + exports2.BlobName = { serializedName: "BlobName", type: { name: "Composite", @@ -37530,7 +39128,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobPropertiesInternal = { + exports2.BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { @@ -37857,7 +39455,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListBlobsHierarchySegmentResponse = { + exports2.ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { @@ -37928,7 +39526,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobHierarchyListSegment = { + exports2.BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { @@ -37967,7 +39565,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobPrefix = { + exports2.BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", @@ -37984,7 +39582,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockLookupList = { + exports2.BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { @@ -38033,7 +39631,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockList = { + exports2.BlockList = { serializedName: "BlockList", type: { name: "Composite", @@ -38072,7 +39670,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Block = { + exports2.Block = { serializedName: "Block", type: { name: "Composite", @@ -38097,7 +39695,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageList = { + exports2.PageList = { serializedName: "PageList", type: { name: "Composite", @@ -38141,7 +39739,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageRange = { + exports2.PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { @@ -38167,7 +39765,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ClearRange = { + exports2.ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { @@ -38193,7 +39791,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QueryRequest = { + exports2.QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { @@ -38235,7 +39833,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QuerySerialization = { + exports2.QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", @@ -38252,7 +39850,7 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QueryFormat = { + exports2.QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", @@ -38302,7 +39900,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var DelimitedTextConfiguration = { + exports2.DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { @@ -38347,7 +39945,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var JsonTextConfiguration = { + exports2.JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { @@ -38364,7 +39962,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ArrowConfiguration = { + exports2.ArrowConfiguration = { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { @@ -38390,7 +39988,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ArrowField = { + exports2.ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { @@ -38429,7 +40027,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSetPropertiesHeaders = { + exports2.ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", @@ -38466,7 +40064,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSetPropertiesExceptionHeaders = { + exports2.ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", @@ -38482,7 +40080,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetPropertiesHeaders = { + exports2.ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", @@ -38519,7 +40117,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetPropertiesExceptionHeaders = { + exports2.ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -38535,7 +40133,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetStatisticsHeaders = { + exports2.ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", @@ -38579,7 +40177,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetStatisticsExceptionHeaders = { + exports2.ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", @@ -38595,7 +40193,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceListContainersSegmentHeaders = { + exports2.ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", @@ -38632,7 +40230,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceListContainersSegmentExceptionHeaders = { + exports2.ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", @@ -38648,7 +40246,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetUserDelegationKeyHeaders = { + exports2.ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", @@ -38692,7 +40290,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var 
ServiceGetUserDelegationKeyExceptionHeaders = { + exports2.ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", @@ -38708,7 +40306,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetAccountInfoHeaders = { + exports2.ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", @@ -38787,7 +40385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetAccountInfoExceptionHeaders = { + exports2.ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -38803,7 +40401,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSubmitBatchHeaders = { + exports2.ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", @@ -38847,7 +40445,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSubmitBatchExceptionHeaders = { + exports2.ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", @@ -38863,7 +40461,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceFilterBlobsHeaders = { + exports2.ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", @@ -38907,7 +40505,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceFilterBlobsExceptionHeaders = { + exports2.ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -38923,7 +40521,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerCreateHeaders = { + exports2.ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", @@ -38981,7 +40579,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerCreateExceptionHeaders = { + exports2.ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", @@ -38997,7 +40595,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetPropertiesHeaders = { + exports2.ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", @@ -39137,7 +40735,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetPropertiesExceptionHeaders = { + exports2.ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -39153,7 +40751,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerDeleteHeaders = { + exports2.ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", @@ -39197,7 +40795,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerDeleteExceptionHeaders = { + exports2.ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", @@ -39213,7 +40811,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetMetadataHeaders = { + exports2.ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", @@ -39271,7 +40869,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetMetadataExceptionHeaders 
= { + exports2.ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", @@ -39287,7 +40885,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccessPolicyHeaders = { + exports2.ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", @@ -39353,7 +40951,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccessPolicyExceptionHeaders = { + exports2.ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -39369,7 +40967,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetAccessPolicyHeaders = { + exports2.ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", @@ -39427,7 +41025,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetAccessPolicyExceptionHeaders = { + exports2.ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -39443,7 +41041,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRestoreHeaders = { + exports2.ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", @@ -39487,7 +41085,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRestoreExceptionHeaders = { + exports2.ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", @@ -39503,7 +41101,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenameHeaders = { + exports2.ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", @@ -39547,7 +41145,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenameExceptionHeaders = { + exports2.ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", @@ -39563,7 +41161,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSubmitBatchHeaders = { + exports2.ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", @@ -39593,7 +41191,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSubmitBatchExceptionHeaders = { + exports2.ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", @@ -39609,7 +41207,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerFilterBlobsHeaders = { + exports2.ContainerFilterBlobsHeaders = { serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", @@ -39646,7 +41244,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerFilterBlobsExceptionHeaders = { + exports2.ContainerFilterBlobsExceptionHeaders = { serializedName: "Container_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -39662,7 +41260,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerAcquireLeaseHeaders = { + exports2.ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", @@ -39720,7 +41318,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var 
ContainerAcquireLeaseExceptionHeaders = { + exports2.ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -39736,7 +41334,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerReleaseLeaseHeaders = { + exports2.ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", @@ -39787,7 +41385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerReleaseLeaseExceptionHeaders = { + exports2.ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -39803,7 +41401,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenewLeaseHeaders = { + exports2.ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", @@ -39861,7 +41459,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenewLeaseExceptionHeaders = { + exports2.ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -39877,7 +41475,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerBreakLeaseHeaders = { + exports2.ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", @@ -39935,7 +41533,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerBreakLeaseExceptionHeaders = { + exports2.ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -39951,7 +41549,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerChangeLeaseHeaders = { + exports2.ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", @@ -40009,7 +41607,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerChangeLeaseExceptionHeaders = { + exports2.ContainerChangeLeaseExceptionHeaders = { serializedName: "Container_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -40025,7 +41623,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobFlatSegmentHeaders = { + exports2.ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", @@ -40076,7 +41674,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobFlatSegmentExceptionHeaders = { + exports2.ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", @@ -40092,7 +41690,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobHierarchySegmentHeaders = { + exports2.ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", @@ -40143,7 +41741,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobHierarchySegmentExceptionHeaders = { + exports2.ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", @@ -40159,7 +41757,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccountInfoHeaders = { + exports2.ContainerGetAccountInfoHeaders = { serializedName: 
"Container_getAccountInfoHeaders", type: { name: "Composite", @@ -40231,7 +41829,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccountInfoExceptionHeaders = { + exports2.ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -40247,7 +41845,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDownloadHeaders = { + exports2.BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", @@ -40587,7 +42185,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDownloadExceptionHeaders = { + exports2.BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", @@ -40603,7 +42201,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetPropertiesHeaders = { + exports2.BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", @@ -40979,7 +42577,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetPropertiesExceptionHeaders = { + exports2.BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -40995,7 +42593,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteHeaders = { + exports2.BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", type: { name: "Composite", @@ -41039,7 +42637,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteExceptionHeaders = { + exports2.BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", @@ -41055,7 +42653,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobUndeleteHeaders = { + exports2.BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", type: { name: "Composite", @@ -41099,7 +42697,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobUndeleteExceptionHeaders = { + exports2.BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", @@ -41115,7 +42713,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetExpiryHeaders = { + exports2.BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", @@ -41166,7 +42764,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetExpiryExceptionHeaders = { + exports2.BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", @@ -41182,7 +42780,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetHttpHeadersHeaders = { + exports2.BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", @@ -41247,7 +42845,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetHttpHeadersExceptionHeaders = { + exports2.BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", @@ -41263,7 +42861,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetImmutabilityPolicyHeaders = { + exports2.BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", type: { name: "Composite", @@ -41315,7 +42913,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetImmutabilityPolicyExceptionHeaders 
= { + exports2.BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", type: { name: "Composite", @@ -41331,7 +42929,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteImmutabilityPolicyHeaders = { + exports2.BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", type: { name: "Composite", @@ -41368,7 +42966,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteImmutabilityPolicyExceptionHeaders = { + exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", type: { name: "Composite", @@ -41384,7 +42982,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetLegalHoldHeaders = { + exports2.BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", type: { name: "Composite", @@ -41428,7 +43026,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetLegalHoldExceptionHeaders = { + exports2.BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", type: { name: "Composite", @@ -41444,7 +43042,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetMetadataHeaders = { + exports2.BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", @@ -41530,7 +43128,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetMetadataExceptionHeaders = { + exports2.BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", @@ -41546,7 +43144,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAcquireLeaseHeaders = { + exports2.BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", @@ -41604,7 +43202,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAcquireLeaseExceptionHeaders = { + exports2.BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -41620,7 +43218,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobReleaseLeaseHeaders = { + exports2.BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", @@ -41671,7 +43269,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobReleaseLeaseExceptionHeaders = { + exports2.BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -41687,7 +43285,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobRenewLeaseHeaders = { + exports2.BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", @@ -41745,7 +43343,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobRenewLeaseExceptionHeaders = { + exports2.BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -41761,7 +43359,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobChangeLeaseHeaders = { + exports2.BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", @@ -41819,7 +43417,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobChangeLeaseExceptionHeaders = { + exports2.BlobChangeLeaseExceptionHeaders = { serializedName: 
"Blob_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -41835,7 +43433,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobBreakLeaseHeaders = { + exports2.BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", @@ -41893,7 +43491,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobBreakLeaseExceptionHeaders = { + exports2.BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -41909,7 +43507,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCreateSnapshotHeaders = { + exports2.BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", @@ -41988,7 +43586,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCreateSnapshotExceptionHeaders = { + exports2.BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", @@ -42004,7 +43602,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobStartCopyFromURLHeaders = { + exports2.BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", @@ -42084,7 +43682,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobStartCopyFromURLExceptionHeaders = { + exports2.BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -42100,7 +43698,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCopyFromURLHeaders = { + exports2.BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", @@ -42201,7 +43799,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCopyFromURLExceptionHeaders = { + exports2.BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", @@ -42217,7 +43815,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAbortCopyFromURLHeaders = { + exports2.BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", @@ -42261,7 +43859,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAbortCopyFromURLExceptionHeaders = { + exports2.BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -42277,7 +43875,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTierHeaders = { + exports2.BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", type: { name: "Composite", @@ -42314,7 +43912,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTierExceptionHeaders = { + exports2.BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", @@ -42330,7 +43928,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetAccountInfoHeaders = { + exports2.BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", @@ -42402,7 +44000,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetAccountInfoExceptionHeaders = { + exports2.BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -42418,7 +44016,7 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobQueryHeaders = { + exports2.BlobQueryHeaders = { serializedName: "Blob_queryHeaders", type: { name: "Composite", @@ -42678,7 +44276,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobQueryExceptionHeaders = { + exports2.BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", @@ -42694,7 +44292,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetTagsHeaders = { + exports2.BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", type: { name: "Composite", @@ -42738,7 +44336,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetTagsExceptionHeaders = { + exports2.BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", @@ -42754,7 +44352,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTagsHeaders = { + exports2.BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", type: { name: "Composite", @@ -42798,7 +44396,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTagsExceptionHeaders = { + exports2.BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", @@ -42814,7 +44412,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCreateHeaders = { + exports2.PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", type: { name: "Composite", @@ -42907,7 +44505,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCreateExceptionHeaders = { + exports2.PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", @@ -42923,7 +44521,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesHeaders = { + exports2.PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", @@ -43023,7 +44621,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesExceptionHeaders = { + exports2.PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", @@ -43039,7 +44637,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobClearPagesHeaders = { + exports2.PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", @@ -43118,7 +44716,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobClearPagesExceptionHeaders = { + exports2.PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", @@ -43134,7 +44732,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesFromURLHeaders = { + exports2.PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", @@ -43227,7 +44825,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesFromURLExceptionHeaders = { + exports2.PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", @@ -43243,7 +44841,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesHeaders = { + exports2.PageBlobGetPageRangesHeaders = { serializedName: 
"PageBlob_getPageRangesHeaders", type: { name: "Composite", @@ -43308,7 +44906,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesExceptionHeaders = { + exports2.PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", @@ -43324,7 +44922,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesDiffHeaders = { + exports2.PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", @@ -43389,7 +44987,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesDiffExceptionHeaders = { + exports2.PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", type: { name: "Composite", @@ -43405,7 +45003,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobResizeHeaders = { + exports2.PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", type: { name: "Composite", @@ -43470,7 +45068,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobResizeExceptionHeaders = { + exports2.PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", type: { name: "Composite", @@ -43486,7 +45084,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUpdateSequenceNumberHeaders = { + exports2.PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", @@ -43551,7 +45149,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUpdateSequenceNumberExceptionHeaders = { + exports2.PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", @@ -43567,7 +45165,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCopyIncrementalHeaders = { + exports2.PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", @@ -43640,7 +45238,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCopyIncrementalExceptionHeaders = { + exports2.PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", @@ -43656,7 +45254,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobCreateHeaders = { + exports2.AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", type: { name: "Composite", @@ -43749,7 +45347,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobCreateExceptionHeaders = { + exports2.AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", @@ -43765,7 +45363,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobAppendBlockHeaders = { + exports2.AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", @@ -43872,7 +45470,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobAppendBlockExceptionHeaders = { + exports2.AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", @@ -43888,7 +45486,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobAppendBlockFromUrlHeaders 
= { + exports2.AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", @@ -43988,7 +45586,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobAppendBlockFromUrlExceptionHeaders = { + exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", @@ -44004,7 +45602,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobSealHeaders = { + exports2.AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", @@ -44062,7 +45660,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobSealExceptionHeaders = { + exports2.AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", @@ -44078,7 +45676,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobUploadHeaders = { + exports2.BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", @@ -44171,7 +45769,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobUploadExceptionHeaders = { + exports2.BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", @@ -44187,7 +45785,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobPutBlobFromUrlHeaders = { + exports2.BlockBlobPutBlobFromUrlHeaders = { serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", @@ -44280,7 +45878,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobPutBlobFromUrlExceptionHeaders = { + exports2.BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", @@ -44296,7 +45894,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobStageBlockHeaders = { + exports2.BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", @@ -44375,7 +45973,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobStageBlockExceptionHeaders = { + exports2.BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", @@ -44391,7 +45989,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobStageBlockFromURLHeaders = { + exports2.BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", @@ -44469,6322 +46067,7986 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } } - }; - var BlockBlobStageBlockFromURLExceptionHeaders = { - serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockFromURLExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + }; + exports2.BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlockBlobCommitBlockListHeaders = { + serializedName: 
"BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: 
"BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js +var require_parameters = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.action3 = exports2.action2 = exports2.leaseId1 = exports2.action1 = exports2.proposedLeaseId = exports2.duration = exports2.action = exports2.comp10 = exports2.sourceLeaseId = exports2.sourceContainerName = exports2.comp9 = exports2.deletedContainerVersion = exports2.deletedContainerName = exports2.comp8 = exports2.containerAcl = exports2.comp7 = exports2.comp6 = exports2.ifUnmodifiedSince = exports2.ifModifiedSince = exports2.leaseId = exports2.preventEncryptionScopeOverride = exports2.defaultEncryptionScope = exports2.access = exports2.metadata = exports2.restype2 = exports2.where = exports2.comp5 = exports2.multipartContentType = exports2.contentLength = exports2.comp4 = exports2.body = exports2.restype1 = exports2.comp3 = exports2.keyInfo = exports2.include = exports2.maxPageSize = exports2.marker = exports2.prefix = exports2.comp2 = exports2.comp1 = exports2.accept1 = exports2.requestId = exports2.version = exports2.timeoutInSeconds = exports2.comp = exports2.restype = exports2.url = exports2.accept = exports2.blobServiceProperties = exports2.contentType = void 0; + exports2.fileRequestIntent = exports2.copySourceTags = exports2.copySourceAuthorization = exports2.sourceContentMD5 = exports2.xMsRequiresSync = exports2.legalHold1 = exports2.sealBlob = exports2.blobTagsString = exports2.copySource = exports2.sourceIfTags = exports2.sourceIfNoneMatch = exports2.sourceIfMatch = exports2.sourceIfUnmodifiedSince = exports2.sourceIfModifiedSince = exports2.rehydratePriority = exports2.tier = exports2.comp14 = exports2.encryptionScope = exports2.legalHold = exports2.comp13 = exports2.immutabilityPolicyMode = exports2.immutabilityPolicyExpiry = exports2.comp12 = exports2.blobContentDisposition = exports2.blobContentLanguage = exports2.blobContentEncoding = exports2.blobContentMD5 = exports2.blobContentType = exports2.blobCacheControl = exports2.expiresOn = exports2.expiryOptions = exports2.comp11 = exports2.blobDeleteType = exports2.deleteSnapshots = exports2.ifTags = exports2.ifNoneMatch = exports2.ifMatch = exports2.encryptionAlgorithm = exports2.encryptionKeySha256 = exports2.encryptionKey = exports2.rangeGetContentCRC64 = exports2.rangeGetContentMD5 = exports2.range = exports2.versionId = exports2.snapshot = exports2.delimiter = exports2.include1 = exports2.proposedLeaseId1 = exports2.action4 = exports2.breakPeriod = void 0; + exports2.listType = exports2.comp25 = exports2.blocks = exports2.blockId = exports2.comp24 = exports2.copySourceBlobProperties = exports2.blobType2 = exports2.comp23 = exports2.sourceRange1 = exports2.appendPosition = exports2.maxSize = exports2.comp22 = exports2.blobType1 = exports2.comp21 = exports2.sequenceNumberAction = exports2.prevSnapshotUrl = exports2.prevsnapshot = exports2.comp20 = exports2.range1 = exports2.sourceContentCrc64 = exports2.sourceRange = exports2.sourceUrl = exports2.pageWrite1 = exports2.ifSequenceNumberEqualTo = exports2.ifSequenceNumberLessThan = exports2.ifSequenceNumberLessThanOrEqualTo = exports2.pageWrite = 
exports2.comp19 = exports2.accept2 = exports2.body1 = exports2.contentType1 = exports2.blobSequenceNumber = exports2.blobContentLength = exports2.blobType = exports2.transactionalContentCrc64 = exports2.transactionalContentMD5 = exports2.tags = exports2.comp18 = exports2.comp17 = exports2.queryRequest = exports2.tier1 = exports2.comp16 = exports2.copyId = exports2.copyActionAbortConstant = exports2.comp15 = void 0; + var mappers_js_1 = require_mappers(); + exports2.contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } + }; + exports2.blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: mappers_js_1.BlobServiceProperties + }; + exports2.accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + exports2.url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true + }; + exports2.restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } + }; + exports2.version = { + parameterPath: "version", + mapper: { + defaultValue: "2025-07-05", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } + }; + exports2.requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } + }; + exports2.accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + exports2.comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } + }; + exports2.marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } + }; + exports2.maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } + }; + exports2.include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: "CSV" + }; + 
exports2.keyInfo = { + parameterPath: "keyInfo", + mapper: mappers_js_1.KeyInfo + }; + exports2.comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } + }; + exports2.comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } + }; + exports2.multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } + }; + exports2.comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } + }; + exports2.restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + }; + exports2.access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } + }; + exports2.defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } + }; + exports2.preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } + }; + exports2.leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } + }; + exports2.ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + 
isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } + }; + exports2.comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } + }; + exports2.deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } + }; + exports2.comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } + }; + exports2.sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } + }; + exports2.comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } + }; + exports2.proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } + }; + exports2.action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } + }; + exports2.action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } 
+ } + }; + exports2.action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } + }; + exports2.include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: "CSV" + }; + exports2.delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } + }; + exports2.snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } + }; + exports2.versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } + }; + exports2.range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } + }; + exports2.rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } + }; + exports2.rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } + }; + exports2.encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } + }; + exports2.encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } + }; + exports2.encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } + }; + exports2.ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } + }; + exports2.ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } + }; + exports2.ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } + }; + exports2.deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + 
allowedValues: ["include", "only"] + } + } + }; + exports2.blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } + }; + exports2.comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } + }; + exports2.expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } + }; + exports2.blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } + }; + exports2.blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } + }; + exports2.blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } + }; + exports2.blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } + }; + exports2.blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } + }; + exports2.blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } + }; + exports2.comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + }; + exports2.comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + }; + exports2.encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: 
"String" + } + } + }; + exports2.comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } + }; + exports2.rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } + }; + exports2.sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } + }; + exports2.sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } + }; + exports2.sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } + }; + exports2.copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } + }; + exports2.blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } + }; + exports2.sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } + }; + exports2.legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + }; + exports2.xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } + }; + exports2.sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } + }; + exports2.copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } + }; + 
exports2.copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } + }; + exports2.fileRequestIntent = { + parameterPath: ["options", "fileRequestIntent"], + mapper: { + serializedName: "x-ms-file-request-intent", + xmlName: "x-ms-file-request-intent", + type: { + name: "String" + } + } + }; + exports2.comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } + }; + exports2.copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } + }; + exports2.comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } + } + }; + exports2.queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: mappers_js_1.QueryRequest + }; + exports2.comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.tags = { + parameterPath: ["options", "tags"], + mapper: mappers_js_1.BlobTags + }; + exports2.transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } + }; + exports2.transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + }; + exports2.blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } + }; + exports2.blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } + }; + exports2.blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } + }; + exports2.contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } + }; + exports2.body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: 
"Stream" + } + } + }; + exports2.accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + exports2.comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } + }; + exports2.ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } + }; + exports2.ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } + }; + exports2.ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } + }; + exports2.pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } + }; + exports2.sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } + }; + exports2.sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } + }; + exports2.sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } + }; + exports2.range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } + }; + exports2.comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } + }; + exports2.prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } + }; + exports2.sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } + }; + exports2.comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: 
"AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } + }; + exports2.comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } + }; + exports2.appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } + }; + exports2.sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } + }; + exports2.comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } + }; + exports2.copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } + }; + exports2.comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } + }; + exports2.blocks = { + parameterPath: "blocks", + mapper: mappers_js_1.BlockLookupList + }; + exports2.comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js +var require_service = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ServiceImpl = class { + client; + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. 
+ * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + }; + exports2.ServiceImpl = ServiceImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; 
+ var getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js +var require_container = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ContainerImpl = class { + client; + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * operation marks the specified container for deletion. 
The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } - }; - var BlockBlobCommitBlockListHeaders = { - serializedName: "BlockBlob_commitBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } - }; - var BlockBlobCommitBlockListExceptionHeaders = { - serializedName: "BlockBlob_commitBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. 
+ */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } }; - var BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.ContainerImpl = ContainerImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer }; - var BlockBlobGetBlockListExceptionHeaders = { - serializedName: "BlockBlob_getBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListExceptionHeaders", - modelProperties: { - errorCode: { - 
serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer }; - var Mappers = /* @__PURE__ */ Object.freeze({ - __proto__: null, - AccessPolicy, - AppendBlobAppendBlockExceptionHeaders, - AppendBlobAppendBlockFromUrlExceptionHeaders, - AppendBlobAppendBlockFromUrlHeaders, - AppendBlobAppendBlockHeaders, - AppendBlobCreateExceptionHeaders, - AppendBlobCreateHeaders, - AppendBlobSealExceptionHeaders, - AppendBlobSealHeaders, - ArrowConfiguration, - ArrowField, - BlobAbortCopyFromURLExceptionHeaders, - BlobAbortCopyFromURLHeaders, - BlobAcquireLeaseExceptionHeaders, - BlobAcquireLeaseHeaders, - BlobBreakLeaseExceptionHeaders, - BlobBreakLeaseHeaders, - BlobChangeLeaseExceptionHeaders, - BlobChangeLeaseHeaders, - BlobCopyFromURLExceptionHeaders, - BlobCopyFromURLHeaders, - BlobCreateSnapshotExceptionHeaders, - BlobCreateSnapshotHeaders, - BlobDeleteExceptionHeaders, - BlobDeleteHeaders, - BlobDeleteImmutabilityPolicyExceptionHeaders, - BlobDeleteImmutabilityPolicyHeaders, - BlobDownloadExceptionHeaders, - BlobDownloadHeaders, - BlobFlatListSegment, - BlobGetAccountInfoExceptionHeaders, - BlobGetAccountInfoHeaders, - BlobGetPropertiesExceptionHeaders, - BlobGetPropertiesHeaders, - BlobGetTagsExceptionHeaders, - BlobGetTagsHeaders, - BlobHierarchyListSegment, - BlobItemInternal, - BlobName, - BlobPrefix, - BlobPropertiesInternal, - BlobQueryExceptionHeaders, - BlobQueryHeaders, - BlobReleaseLeaseExceptionHeaders, - BlobReleaseLeaseHeaders, - BlobRenewLeaseExceptionHeaders, - BlobRenewLeaseHeaders, - BlobServiceProperties, - BlobServiceStatistics, - BlobSetExpiryExceptionHeaders, - BlobSetExpiryHeaders, - BlobSetHttpHeadersExceptionHeaders, - BlobSetHttpHeadersHeaders, - BlobSetImmutabilityPolicyExceptionHeaders, - BlobSetImmutabilityPolicyHeaders, - BlobSetLegalHoldExceptionHeaders, - BlobSetLegalHoldHeaders, - BlobSetMetadataExceptionHeaders, - BlobSetMetadataHeaders, - BlobSetTagsExceptionHeaders, - BlobSetTagsHeaders, - BlobSetTierExceptionHeaders, - BlobSetTierHeaders, - BlobStartCopyFromURLExceptionHeaders, - BlobStartCopyFromURLHeaders, - BlobTag, - BlobTags, - BlobUndeleteExceptionHeaders, - BlobUndeleteHeaders, - Block, - BlockBlobCommitBlockListExceptionHeaders, - BlockBlobCommitBlockListHeaders, - BlockBlobGetBlockListExceptionHeaders, - BlockBlobGetBlockListHeaders, - BlockBlobPutBlobFromUrlExceptionHeaders, - BlockBlobPutBlobFromUrlHeaders, - BlockBlobStageBlockExceptionHeaders, - BlockBlobStageBlockFromURLExceptionHeaders, - BlockBlobStageBlockFromURLHeaders, - BlockBlobStageBlockHeaders, - BlockBlobUploadExceptionHeaders, - BlockBlobUploadHeaders, - BlockList, - BlockLookupList, - ClearRange, - ContainerAcquireLeaseExceptionHeaders, - ContainerAcquireLeaseHeaders, - ContainerBreakLeaseExceptionHeaders, - ContainerBreakLeaseHeaders, - ContainerChangeLeaseExceptionHeaders, - ContainerChangeLeaseHeaders, - ContainerCreateExceptionHeaders, - ContainerCreateHeaders, - 
ContainerDeleteExceptionHeaders, - ContainerDeleteHeaders, - ContainerFilterBlobsExceptionHeaders, - ContainerFilterBlobsHeaders, - ContainerGetAccessPolicyExceptionHeaders, - ContainerGetAccessPolicyHeaders, - ContainerGetAccountInfoExceptionHeaders, - ContainerGetAccountInfoHeaders, - ContainerGetPropertiesExceptionHeaders, - ContainerGetPropertiesHeaders, - ContainerItem, - ContainerListBlobFlatSegmentExceptionHeaders, - ContainerListBlobFlatSegmentHeaders, - ContainerListBlobHierarchySegmentExceptionHeaders, - ContainerListBlobHierarchySegmentHeaders, - ContainerProperties, - ContainerReleaseLeaseExceptionHeaders, - ContainerReleaseLeaseHeaders, - ContainerRenameExceptionHeaders, - ContainerRenameHeaders, - ContainerRenewLeaseExceptionHeaders, - ContainerRenewLeaseHeaders, - ContainerRestoreExceptionHeaders, - ContainerRestoreHeaders, - ContainerSetAccessPolicyExceptionHeaders, - ContainerSetAccessPolicyHeaders, - ContainerSetMetadataExceptionHeaders, - ContainerSetMetadataHeaders, - ContainerSubmitBatchExceptionHeaders, - ContainerSubmitBatchHeaders, - CorsRule, - DelimitedTextConfiguration, - FilterBlobItem, - FilterBlobSegment, - GeoReplication, - JsonTextConfiguration, - KeyInfo, - ListBlobsFlatSegmentResponse, - ListBlobsHierarchySegmentResponse, - ListContainersSegmentResponse, - Logging, - Metrics, - PageBlobClearPagesExceptionHeaders, - PageBlobClearPagesHeaders, - PageBlobCopyIncrementalExceptionHeaders, - PageBlobCopyIncrementalHeaders, - PageBlobCreateExceptionHeaders, - PageBlobCreateHeaders, - PageBlobGetPageRangesDiffExceptionHeaders, - PageBlobGetPageRangesDiffHeaders, - PageBlobGetPageRangesExceptionHeaders, - PageBlobGetPageRangesHeaders, - PageBlobResizeExceptionHeaders, - PageBlobResizeHeaders, - PageBlobUpdateSequenceNumberExceptionHeaders, - PageBlobUpdateSequenceNumberHeaders, - PageBlobUploadPagesExceptionHeaders, - PageBlobUploadPagesFromURLExceptionHeaders, - PageBlobUploadPagesFromURLHeaders, - PageBlobUploadPagesHeaders, - PageList, - PageRange, - QueryFormat, - QueryRequest, - QuerySerialization, - RetentionPolicy, - ServiceFilterBlobsExceptionHeaders, - ServiceFilterBlobsHeaders, - ServiceGetAccountInfoExceptionHeaders, - ServiceGetAccountInfoHeaders, - ServiceGetPropertiesExceptionHeaders, - ServiceGetPropertiesHeaders, - ServiceGetStatisticsExceptionHeaders, - ServiceGetStatisticsHeaders, - ServiceGetUserDelegationKeyExceptionHeaders, - ServiceGetUserDelegationKeyHeaders, - ServiceListContainersSegmentExceptionHeaders, - ServiceListContainersSegmentHeaders, - ServiceSetPropertiesExceptionHeaders, - ServiceSetPropertiesHeaders, - ServiceSubmitBatchExceptionHeaders, - ServiceSubmitBatchHeaders, - SignedIdentifier, - StaticWebsite, - StorageError, - UserDelegationKey - }); - var contentType = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" + var deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer }; 
- var blobServiceProperties = { - parameterPath: "blobServiceProperties", - mapper: BlobServiceProperties + var setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer }; - var accept = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" + var getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer }; - var url = { - parameterPath: "url", - mapper: { - serializedName: "url", - required: true, - xmlName: "url", - type: { - name: "String" + var setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders } }, - skipEncoding: true + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var restype = { - parameterPath: "restype", - mapper: { - defaultValue: "service", - isConstant: true, - serializedName: "restype", - type: { - name: "String" + var restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer }; - var comp = { - parameterPath: 
"comp", - mapper: { - defaultValue: "properties", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer }; - var timeoutInSeconds = { - parameterPath: ["options", "timeoutInSeconds"], - mapper: { - constraints: { - InclusiveMinimum: 0 + var submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders }, - serializedName: "timeout", - xmlName: "timeout", - type: { - name: "Number" + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders } - } + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var version = { - parameterPath: "version", - mapper: { - defaultValue: "2025-05-05", - isConstant: true, - serializedName: "x-ms-version", - type: { - name: "String" + var filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var requestId = { - parameterPath: ["options", "requestId"], - mapper: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" + var acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer }; - var accept1 = { - parameterPath: "accept", - mapper: { - defaultValue: 
"application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" + var releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp1 = { - parameterPath: "comp", - mapper: { - defaultValue: "stats", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer }; - var comp2 = { - parameterPath: "comp", - mapper: { - defaultValue: "list", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer }; - var prefix = { - parameterPath: ["options", "prefix"], - mapper: { - serializedName: "prefix", - xmlName: "prefix", - type: { - name: "String" + var changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer }; - var marker = { - parameterPath: ["options", "marker"], - mapper: { - serializedName: "marker", - xmlName: "marker", - type: { - name: "String" + var listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: 
Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var maxPageSize = { - parameterPath: ["options", "maxPageSize"], - mapper: { - constraints: { - InclusiveMinimum: 1 + var listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders }, - serializedName: "maxresults", - xmlName: "maxresults", - type: { - name: "Number" + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var include = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListContainersIncludeType", - type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } + var getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders } }, - collectionFormat: "CSV" - }; - var keyInfo = { - parameterPath: "keyInfo", - mapper: KeyInfo + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp3 = { - parameterPath: "comp", - mapper: { - defaultValue: "userdelegationkey", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js +var require_blob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlobImpl = class { + client; + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var restype1 = { - parameterPath: "restype", - mapper: { - defaultValue: "account", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } - }; - var body = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" - } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } - }; - var comp4 = { - parameterPath: "comp", - mapper: { - defaultValue: "batch", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } - }; - var contentLength = { - parameterPath: "contentLength", - mapper: { - serializedName: "Content-Length", - required: true, - xmlName: "Content-Length", - type: { - name: "Number" - } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } - }; - var multipartContentType = { - parameterPath: "multipartContentType", - mapper: { - serializedName: "Content-Type", - required: true, - xmlName: "Content-Type", - type: { - name: "String" - } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. 
+ */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } - }; - var comp5 = { - parameterPath: "comp", - mapper: { - defaultValue: "blobs", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } - }; - var where = { - parameterPath: ["options", "where"], - mapper: { - serializedName: "where", - xmlName: "where", - type: { - name: "String" - } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } - }; - var restype2 = { - parameterPath: "restype", - mapper: { - defaultValue: "container", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } - }; - var metadata = { - parameterPath: ["options", "metadata"], - mapper: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } - }; - var access3 = { - parameterPath: ["options", "access"], - mapper: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. 
+ * @param options The options parameters. + */ + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. + */ + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } }; - var defaultEncryptionScope = { - parameterPath: [ - "options", - "containerEncryptionScope", - "defaultEncryptionScope" - ], - mapper: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", - type: { - name: "String" + exports2.BlobImpl = BlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var preventEncryptionScopeOverride = { - parameterPath: [ - "options", - "containerEncryptionScope", - "preventEncryptionScopeOverride" + var getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId ], - mapper: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", - type: { - name: "Boolean" + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + 
Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer + }; + var deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer }; - var leaseId = { - parameterPath: ["options", "leaseAccessConditions", "leaseId"], - mapper: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" + var undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var ifModifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], - mapper: { - serializedName: "If-Modified-Since", - xmlName: "If-Modified-Since", - type: { - name: "DateTimeRfc1123" + var setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer }; - var ifUnmodifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], - mapper: { - serializedName: "If-Unmodified-Since", - xmlName: "If-Unmodified-Since", - type: { - name: "DateTimeRfc1123" + var setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders } - } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer }; - var comp6 = { - 
parameterPath: "comp", - mapper: { - defaultValue: "metadata", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer }; - var comp7 = { - parameterPath: "comp", - mapper: { - defaultValue: "acl", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var containerAcl = { - parameterPath: ["options", "containerAcl"], - mapper: { - serializedName: "containerAcl", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SignedIdentifier" - } - } + var setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp13 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer }; - var comp8 = { - parameterPath: "comp", - mapper: { - defaultValue: "undelete", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer }; - var 
deletedContainerName = { - parameterPath: ["options", "deletedContainerName"], - mapper: { - serializedName: "x-ms-deleted-container-name", - xmlName: "x-ms-deleted-container-name", - type: { - name: "String" + var acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var deletedContainerVersion = { - parameterPath: ["options", "deletedContainerVersion"], - mapper: { - serializedName: "x-ms-deleted-container-version", - xmlName: "x-ms-deleted-container-version", - type: { - name: "String" + var releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var comp9 = { - parameterPath: "comp", - mapper: { - defaultValue: "rename", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var sourceContainerName = { - parameterPath: "sourceContainerName", - mapper: { - serializedName: "x-ms-source-container-name", - required: true, - xmlName: "x-ms-source-container-name", - type: { - name: "String" + var changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + 
Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var sourceLeaseId = { - parameterPath: ["options", "sourceLeaseId"], - mapper: { - serializedName: "x-ms-source-lease-id", - xmlName: "x-ms-source-lease-id", - type: { - name: "String" + var breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var comp10 = { - parameterPath: "comp", - mapper: { - defaultValue: "lease", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer }; - var action = { - parameterPath: "action", - mapper: { - defaultValue: "acquire", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + var startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer }; - var duration = { - parameterPath: ["options", "duration"], - mapper: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Number" + var copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: 
"PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent + ], + isXML: true, + serializer: xmlSerializer }; - var proposedLeaseId = { - parameterPath: ["options", "proposedLeaseId"], - mapper: { - serializedName: "x-ms-proposed-lease-id", - xmlName: "x-ms-proposed-lease-id", - type: { - name: "String" + var abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer }; - var action1 = { - parameterPath: "action", - mapper: { - defaultValue: "release", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + var setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, + Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer }; - var leaseId1 = { - parameterPath: "leaseId", - mapper: { - serializedName: "x-ms-lease-id", - required: true, - xmlName: "x-ms-lease-id", - type: { - name: "String" + var getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer 
}; - var action2 = { - parameterPath: "action", - mapper: { - defaultValue: "renew", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + var queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders } - } + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var action3 = { - parameterPath: "action", - mapper: { - defaultValue: "break", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" + var getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var breakPeriod = { - parameterPath: ["options", "breakPeriod"], - mapper: { - serializedName: "x-ms-lease-break-period", - xmlName: "x-ms-lease-break-period", - type: { - name: "Number" + var setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders } - } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var action4 = { - parameterPath: "action", - mapper: { - defaultValue: "change", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js +var require_pageBlob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js"(exports2) { + "use 
strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var PageBlobImpl = class { + client; + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } - }; - var proposedLeaseId1 = { - parameterPath: "proposedLeaseId", - mapper: { - serializedName: "x-ms-proposed-lease-id", - required: true, - xmlName: "x-ms-proposed-lease-id", - type: { - name: "String" - } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. 
+ */ + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } - }; - var include1 = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListBlobsIncludeItem", - type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: [ - "copy", - "deleted", - "metadata", - "snapshots", - "uncommittedblobs", - "versions", - "tags", - "immutabilitypolicy", - "legalhold", - "deletedwithversions" - ] - } - } - } - }, - collectionFormat: "CSV" - }; - var delimiter = { - parameterPath: "delimiter", - mapper: { - serializedName: "delimiter", - required: true, - xmlName: "delimiter", - type: { - name: "String" - } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } - }; - var snapshot = { - parameterPath: ["options", "snapshot"], - mapper: { - serializedName: "snapshot", - xmlName: "snapshot", - type: { - name: "String" - } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } - }; - var versionId = { - parameterPath: ["options", "versionId"], - mapper: { - serializedName: "versionid", - xmlName: "versionid", - type: { - name: "String" - } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } - }; - var range = { - parameterPath: ["options", "range"], - mapper: { - serializedName: "x-ms-range", - xmlName: "x-ms-range", - type: { - name: "String" - } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } }; - var rangeGetContentMD5 = { - parameterPath: ["options", "rangeGetContentMD5"], - mapper: { - serializedName: "x-ms-range-get-content-md5", - xmlName: "x-ms-range-get-content-md5", - type: { - name: "Boolean" + exports2.PageBlobImpl = PageBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer }; - var rangeGetContentCRC64 = { - parameterPath: ["options", "rangeGetContentCRC64"], - mapper: { - serializedName: "x-ms-range-get-content-crc64", - xmlName: "x-ms-range-get-content-crc64", - type: { - name: "Boolean" + var uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders } - } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var encryptionKey = { - parameterPath: ["options", "cpkInfo", "encryptionKey"], - mapper: { - serializedName: "x-ms-encryption-key", - xmlName: "x-ms-encryption-key", - type: { - name: "String" + var clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders 
+ }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 + ], + isXML: true, + serializer: xmlSerializer }; - var encryptionKeySha256 = { - parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], - mapper: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" + var uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 + ], + isXML: true, + serializer: xmlSerializer }; - var encryptionAlgorithm = { - parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], - mapper: { - serializedName: "x-ms-encryption-algorithm", - xmlName: "x-ms-encryption-algorithm", - type: { - name: "String" + var getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var ifMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], 
- mapper: { - serializedName: "If-Match", - xmlName: "If-Match", - type: { - name: "String" + var getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer }; - var ifNoneMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], - mapper: { - serializedName: "If-None-Match", - xmlName: "If-None-Match", - type: { - name: "String" + var resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobResizeHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders } - } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.blobContentLength + ], + isXML: true, + serializer: xmlSerializer }; - var ifTags = { - parameterPath: ["options", "modifiedAccessConditions", "ifTags"], - mapper: { - serializedName: "x-ms-if-tags", - xmlName: "x-ms-if-tags", - type: { - name: "String" + var updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders } - } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer }; - var deleteSnapshots = { - parameterPath: ["options", "deleteSnapshots"], - mapper: { - serializedName: "x-ms-delete-snapshots", - xmlName: "x-ms-delete-snapshots", - type: { - name: "Enum", - allowedValues: ["include", "only"] + var copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders } - } + }, + queryParameters: 
[Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource + ], + isXML: true, + serializer: xmlSerializer }; - var blobDeleteType = { - parameterPath: ["options", "blobDeleteType"], - mapper: { - serializedName: "deletetype", - xmlName: "deletetype", - type: { - name: "String" - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js +var require_appendBlob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AppendBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var AppendBlobImpl = class { + client; + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var comp11 = { - parameterPath: "comp", - mapper: { - defaultValue: "expiry", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } - }; - var expiryOptions = { - parameterPath: "expiryOptions", - mapper: { - serializedName: "x-ms-expiry-option", - required: true, - xmlName: "x-ms-expiry-option", - type: { - name: "String" - } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } - }; - var expiresOn = { - parameterPath: ["options", "expiresOn"], - mapper: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", - type: { - name: "String" - } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. 
+ */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } - }; - var blobCacheControl = { - parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], - mapper: { - serializedName: "x-ms-blob-cache-control", - xmlName: "x-ms-blob-cache-control", - type: { - name: "String" - } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. + */ + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); } }; - var blobContentType = { - parameterPath: ["options", "blobHttpHeaders", "blobContentType"], - mapper: { - serializedName: "x-ms-blob-content-type", - xmlName: "x-ms-blob-content-type", - type: { - name: "String" + exports2.AppendBlobImpl = AppendBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentMD5 = { - parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], - mapper: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" + var appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders } - } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var 
blobContentEncoding = { - parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], - mapper: { - serializedName: "x-ms-blob-content-encoding", - xmlName: "x-ms-blob-content-encoding", - type: { - name: "String" + var appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentLanguage = { - parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], - mapper: { - serializedName: "x-ms-blob-content-language", - xmlName: "x-ms-blob-content-language", - type: { - name: "String" + var sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentDisposition = { - parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], - mapper: { - serializedName: "x-ms-blob-content-disposition", - xmlName: "x-ms-blob-content-disposition", - type: { - name: "String" - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js +var require_blockBlob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlockBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlockBlobImpl = class { + client; + /** + * Initialize a new instance of the class BlockBlob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var comp12 = { - parameterPath: "comp", - mapper: { - defaultValue: "immutabilityPolicies", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } - }; - var immutabilityPolicyExpiry = { - parameterPath: ["options", "immutabilityPolicyExpiry"], - mapper: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } - }; - var immutabilityPolicyMode = { - parameterPath: ["options", "immutabilityPolicyMode"], - mapper: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } - }; - var comp13 = { - parameterPath: "comp", - mapper: { - defaultValue: "legalhold", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. 
+ * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } - }; - var legalHold = { - parameterPath: "legalHold", - mapper: { - serializedName: "x-ms-legal-hold", - required: true, - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } - }; - var encryptionScope = { - parameterPath: ["options", "encryptionScope"], - mapper: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } }; - var comp14 = { - parameterPath: "comp", - mapper: { - defaultValue: "snapshot", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.BlockBlobImpl = BlockBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders } - } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var tier = { - parameterPath: ["options", "tier"], - mapper: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] + var putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent, + 
Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer }; - var rehydratePriority = { - parameterPath: ["options", "rehydratePriority"], - mapper: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] + var stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders } - } - }; - var sourceIfModifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + }, + requestBody: Parameters.body1, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId ], - mapper: { - serializedName: "x-ms-source-if-modified-since", - xmlName: "x-ms-source-if-modified-since", - type: { - name: "DateTimeRfc1123" - } - } - }; - var sourceIfUnmodifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 ], - mapper: { - serializedName: "x-ms-source-if-unmodified-since", - xmlName: "x-ms-source-if-unmodified-since", - type: { - name: "DateTimeRfc1123" - } - } + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var sourceIfMatch = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], - mapper: { - serializedName: "x-ms-source-if-match", - xmlName: "x-ms-source-if-match", - type: { - name: "String" + var stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders } - } - }; - var sourceIfNoneMatch = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfNoneMatch" + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId ], - mapper: { - serializedName: "x-ms-source-if-none-match", - xmlName: "x-ms-source-if-none-match", - type: { - name: "String" - } - } + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer }; - var sourceIfTags = { - parameterPath: ["options", "sourceModifiedAccessConditions", 
"sourceIfTags"], - mapper: { - serializedName: "x-ms-source-if-tags", - xmlName: "x-ms-source-if-tags", - type: { - name: "String" + var commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders } - } + }, + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var copySource = { - parameterPath: "copySource", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" + var getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp25, + Parameters.listType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var blobTagsString = { - parameterPath: ["options", "blobTagsString"], - mapper: { - serializedName: "x-ms-tags", - xmlName: "x-ms-tags", - type: { - name: "String" + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js +var require_operations = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service(), exports2); + tslib_1.__exportStar(require_container(), exports2); + tslib_1.__exportStar(require_blob(), exports2); + tslib_1.__exportStar(require_pageBlob(), exports2); + tslib_1.__exportStar(require_appendBlob(), exports2); + tslib_1.__exportStar(require_blockBlob(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js +var require_storageClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js"(exports2) { + "use strict"; + 
Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreHttpCompat = tslib_1.__importStar(require_commonjs9()); + var index_js_1 = require_operations(); + var StorageClient = class extends coreHttpCompat.ExtendedServiceClient { + url; + version; + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + if (url === void 0) { + throw new Error("'url' cannot be null"); } - } - }; - var sealBlob = { - parameterPath: ["options", "sealBlob"], - mapper: { - serializedName: "x-ms-seal-blob", - xmlName: "x-ms-seal-blob", - type: { - name: "Boolean" + if (!options) { + options = {}; } + const defaults = { + requestContentType: "application/json; charset=utf-8" + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.28.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; + const optionsWithDefaults = { + ...defaults, + ...options, + userAgentOptions: { + userAgentPrefix + }, + endpoint: options.endpoint ?? options.baseUri ?? "{url}" + }; + super(optionsWithDefaults); + this.url = url; + this.version = options.version || "2025-07-05"; + this.service = new index_js_1.ServiceImpl(this); + this.container = new index_js_1.ContainerImpl(this); + this.blob = new index_js_1.BlobImpl(this); + this.pageBlob = new index_js_1.PageBlobImpl(this); + this.appendBlob = new index_js_1.AppendBlobImpl(this); + this.blockBlob = new index_js_1.BlockBlobImpl(this); } + service; + container; + blob; + pageBlob; + appendBlob; + blockBlob; }; - var legalHold1 = { - parameterPath: ["options", "legalHold"], - mapper: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" + exports2.StorageClient = StorageClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js +var require_service2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js +var require_container2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js +var require_blob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js +var require_pageBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// 
../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js +var require_appendBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js +var require_blockBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js +var require_operationsInterfaces = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service2(), exports2); + tslib_1.__exportStar(require_container2(), exports2); + tslib_1.__exportStar(require_blob2(), exports2); + tslib_1.__exportStar(require_pageBlob2(), exports2); + tslib_1.__exportStar(require_appendBlob2(), exports2); + tslib_1.__exportStar(require_blockBlob2(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js +var require_src2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_models(), exports2); + var storageClient_js_1 = require_storageClient(); + Object.defineProperty(exports2, "StorageClient", { enumerable: true, get: function() { + return storageClient_js_1.StorageClient; + } }); + tslib_1.__exportStar(require_operationsInterfaces(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js +var require_StorageContextClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageContextClient = void 0; + var index_js_1 = require_src2(); + var StorageContextClient = class extends index_js_1.StorageClient { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = { ...operationSpec }; + if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; } + return super.sendOperationRequest(operationArguments, operationSpecToSend); } }; - var xMsRequiresSync = { - parameterPath: "xMsRequiresSync", - mapper: { - defaultValue: "true", - isConstant: true, - serializedName: "x-ms-requires-sync", - type: { - name: "String" - } + exports2.StorageContextClient = StorageContextClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js +var require_StorageClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + 
var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var StorageClient = class { + /** + * Encoded URL string value. + */ + url; + accountName; + /** + * Request policy pipeline. + * + * @internal + */ + pipeline; + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + storageClientContext; + /** + */ + isHttps; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + this.url = (0, utils_common_js_1.escapeURLPath)(url); + this.accountName = (0, utils_common_js_1.getAccountNameFromUrl)(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient_js_1.StorageContextClient(this.url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + this.isHttps = (0, utils_common_js_1.iEqual)((0, utils_common_js_1.getURLScheme)(this.url) || "", "https"); + this.credential = (0, Pipeline_js_1.getCredentialFromPipeline)(pipeline); + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = void 0; } }; - var sourceContentMD5 = { - parameterPath: ["options", "sourceContentMD5"], - mapper: { - serializedName: "x-ms-source-content-md5", - xmlName: "x-ms-source-content-md5", - type: { - name: "ByteArray" + exports2.StorageClient = StorageClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js +var require_tracing = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.tracingClient = void 0; + var core_tracing_1 = require_commonjs5(); + var constants_js_1 = require_constants9(); + exports2.tracingClient = (0, core_tracing_1.createTracingClient)({ + packageName: "@azure/storage-blob", + packageVersion: constants_js_1.SDK_VERSION, + namespace: "Microsoft.Storage" + }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js +var require_BlobSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobSASPermissions = void 0; + var BlobSASPermissions = class _BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new _BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } } + return blobSASPermissions; } - }; - var copySourceAuthorization = { - parameterPath: ["options", "copySourceAuthorization"], - mapper: { - serializedName: "x-ms-copy-source-authorization", - xmlName: "x-ms-copy-source-authorization", - type: { - name: "String" + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new _BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; } - } - }; - var copySourceTags = { - parameterPath: ["options", "copySourceTags"], - mapper: { - serializedName: "x-ms-copy-source-tag-option", - xmlName: "x-ms-copy-source-tag-option", - type: { - name: "Enum", - allowedValues: ["REPLACE", "COPY"] + if (permissionLike.add) { + blobSASPermissions.add = true; } - } - }; - var comp15 = { - parameterPath: "comp", - mapper: { - defaultValue: "copy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; } - } - }; - var copyActionAbortConstant = { - parameterPath: "copyActionAbortConstant", - mapper: { - defaultValue: "abort", - isConstant: true, - serializedName: "x-ms-copy-action", - type: { - name: "String" + if (permissionLike.tag) { + blobSASPermissions.tag = true; } - } - }; - var copyId = { - parameterPath: "copyId", - mapper: { - serializedName: "copyid", - required: true, - xmlName: "copyid", - type: { - name: "String" + if (permissionLike.move) { + blobSASPermissions.move = true; } - } - }; - var comp16 = { - parameterPath: "comp", - mapper: { - defaultValue: "tier", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (permissionLike.execute) { + blobSASPermissions.execute = true; } - } - }; - var tier1 = { - parameterPath: "tier", - mapper: { - serializedName: "x-ms-access-tier", - required: true, - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; } - } - }; - var queryRequest = { - parameterPath: ["options", "queryRequest"], - mapper: QueryRequest - }; - var comp17 = { - parameterPath: "comp", - 
mapper: { - defaultValue: "query", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; } + return blobSASPermissions; } - }; - var comp18 = { - parameterPath: "comp", - mapper: { - defaultValue: "tags", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + /** + * Specifies Read access granted. + */ + read = false; + /** + * Specifies Add access granted. + */ + add = false; + /** + * Specifies Create access granted. + */ + create = false; + /** + * Specifies Write access granted. + */ + write = false; + /** + * Specifies Delete access granted. + */ + delete = false; + /** + * Specifies Delete version access granted. + */ + deleteVersion = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Specifies Move access granted. + */ + move = false; + /** + * Specifies Execute access granted. + */ + execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - } - }; - var tags = { - parameterPath: ["options", "tags"], - mapper: BlobTags - }; - var transactionalContentMD5 = { - parameterPath: ["options", "transactionalContentMD5"], - mapper: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", - type: { - name: "ByteArray" + if (this.add) { + permissions.push("a"); } - } - }; - var transactionalContentCrc64 = { - parameterPath: ["options", "transactionalContentCrc64"], - mapper: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" + if (this.create) { + permissions.push("c"); } - } - }; - var blobType = { - parameterPath: "blobType", - mapper: { - defaultValue: "PageBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + if (this.write) { + permissions.push("w"); } - } - }; - var blobContentLength = { - parameterPath: "blobContentLength", - mapper: { - serializedName: "x-ms-blob-content-length", - required: true, - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" + if (this.delete) { + permissions.push("d"); } - } - }; - var blobSequenceNumber = { - parameterPath: ["options", "blobSequenceNumber"], - mapper: { - defaultValue: 0, - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" + if (this.deleteVersion) { + permissions.push("x"); } - } - }; - var contentType1 = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/octet-stream", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" + if (this.tag) { + permissions.push("t"); } - } - }; - var body1 = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" + if (this.move) { + permissions.push("m"); } - } - }; - var accept2 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" + if (this.execute) { + permissions.push("e"); } - } - }; - var comp19 = { - 
parameterPath: "comp", - mapper: { - defaultValue: "page", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (this.setImmutabilityPolicy) { + permissions.push("i"); } - } - }; - var pageWrite = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "update", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + if (this.permanentDelete) { + permissions.push("y"); } + return permissions.join(""); } }; - var ifSequenceNumberLessThanOrEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" - ], - mapper: { - serializedName: "x-ms-if-sequence-number-le", - xmlName: "x-ms-if-sequence-number-le", - type: { - name: "Number" + exports2.BlobSASPermissions = BlobSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js +var require_ContainerSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerSASPermissions = void 0; + var ContainerSASPermissions = class _ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new _ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } } + return containerSASPermissions; } - }; - var ifSequenceNumberLessThan = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" - ], - mapper: { - serializedName: "x-ms-if-sequence-number-lt", - xmlName: "x-ms-if-sequence-number-lt", - type: { - name: "Number" + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new _ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; } - } - }; - var ifSequenceNumberEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" - ], - mapper: { - serializedName: "x-ms-if-sequence-number-eq", - xmlName: "x-ms-if-sequence-number-eq", - type: { - name: "Number" + if (permissionLike.add) { + containerSASPermissions.add = true; } - } - }; - var pageWrite1 = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "clear", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + if (permissionLike.create) { + containerSASPermissions.create = true; } - } - }; - var sourceUrl = { - parameterPath: "sourceUrl", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" + if (permissionLike.write) { + containerSASPermissions.write = true; } - } - }; - var sourceRange = { - parameterPath: "sourceRange", - mapper: { - serializedName: "x-ms-source-range", - required: true, - xmlName: "x-ms-source-range", - type: { - name: "String" + if (permissionLike.delete) { + containerSASPermissions.delete = true; } - } - }; - var sourceContentCrc64 = { - parameterPath: ["options", "sourceContentCrc64"], - mapper: { - serializedName: "x-ms-source-content-crc64", - xmlName: "x-ms-source-content-crc64", - type: { - name: "ByteArray" + if (permissionLike.list) { + containerSASPermissions.list = true; } - } - }; - var range1 = { - parameterPath: "range", - mapper: { - serializedName: "x-ms-range", - required: true, - xmlName: "x-ms-range", - type: { - name: "String" + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; } - } - }; - var comp20 = { - parameterPath: "comp", - mapper: { - defaultValue: "pagelist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (permissionLike.tag) { + containerSASPermissions.tag = true; } - } - }; - var prevsnapshot = { - parameterPath: ["options", "prevsnapshot"], - mapper: { - serializedName: "prevsnapshot", - xmlName: "prevsnapshot", - type: { - name: "String" + if (permissionLike.move) { + containerSASPermissions.move = true; } - } - }; - var prevSnapshotUrl = { - parameterPath: ["options", "prevSnapshotUrl"], - mapper: { - serializedName: "x-ms-previous-snapshot-url", - xmlName: "x-ms-previous-snapshot-url", - type: { - name: "String" + if (permissionLike.execute) { + containerSASPermissions.execute = true; } - } - }; - var sequenceNumberAction = { - parameterPath: "sequenceNumberAction", - mapper: { - serializedName: "x-ms-sequence-number-action", - required: true, - xmlName: "x-ms-sequence-number-action", - type: { - name: "Enum", - allowedValues: ["max", "update", "increment"] + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; } - } - }; - var comp21 = { - parameterPath: "comp", - mapper: { - defaultValue: "incrementalcopy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; } - } - }; - var blobType1 = { - parameterPath: "blobType", - mapper: { - defaultValue: "AppendBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; } + return 
containerSASPermissions; } - }; - var comp22 = { - parameterPath: "comp", - mapper: { - defaultValue: "appendblock", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + /** + * Specifies Read access granted. + */ + read = false; + /** + * Specifies Add access granted. + */ + add = false; + /** + * Specifies Create access granted. + */ + create = false; + /** + * Specifies Write access granted. + */ + write = false; + /** + * Specifies Delete access granted. + */ + delete = false; + /** + * Specifies Delete version access granted. + */ + deleteVersion = false; + /** + * Specifies List access granted. + */ + list = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Specifies Move access granted. + */ + move = false; + /** + * Specifies Execute access granted. + */ + execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - } - }; - var maxSize = { - parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], - mapper: { - serializedName: "x-ms-blob-condition-maxsize", - xmlName: "x-ms-blob-condition-maxsize", - type: { - name: "Number" + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); } - } - }; - var appendPosition = { - parameterPath: [ - "options", - "appendPositionAccessConditions", - "appendPosition" - ], - mapper: { - serializedName: "x-ms-blob-condition-appendpos", - xmlName: "x-ms-blob-condition-appendpos", - type: { - name: "Number" + if (this.delete) { + permissions.push("d"); } - } - }; - var sourceRange1 = { - parameterPath: ["options", "sourceRange"], - mapper: { - serializedName: "x-ms-source-range", - xmlName: "x-ms-source-range", - type: { - name: "String" + if (this.deleteVersion) { + permissions.push("x"); } - } - }; - var comp23 = { - parameterPath: "comp", - mapper: { - defaultValue: "seal", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (this.list) { + permissions.push("l"); } - } - }; - var blobType2 = { - parameterPath: "blobType", - mapper: { - defaultValue: "BlockBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + if (this.tag) { + permissions.push("t"); } - } - }; - var copySourceBlobProperties = { - parameterPath: ["options", "copySourceBlobProperties"], - mapper: { - serializedName: "x-ms-copy-source-blob-properties", - xmlName: "x-ms-copy-source-blob-properties", - type: { - name: "Boolean" + if (this.move) { + permissions.push("m"); } - } - }; - var comp24 = { - parameterPath: "comp", - mapper: { - defaultValue: "block", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (this.execute) { + permissions.push("e"); } - } - }; - var blockId = { - parameterPath: "blockId", - mapper: { - serializedName: "blockid", - required: true, - xmlName: "blockid", - type: { - name: "String" 
+ if (this.setImmutabilityPolicy) { + permissions.push("i"); } - } - }; - var blocks = { - parameterPath: "blocks", - mapper: BlockLookupList - }; - var comp25 = { - parameterPath: "comp", - mapper: { - defaultValue: "blocklist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + if (this.permanentDelete) { + permissions.push("y"); } - } - }; - var listType = { - parameterPath: "listType", - mapper: { - defaultValue: "committed", - serializedName: "blocklisttype", - required: true, - xmlName: "blocklisttype", - type: { - name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] + if (this.filterByTags) { + permissions.push("f"); } + return permissions.join(""); } }; - var ServiceImpl = class { + exports2.ContainerSASPermissions = ContainerSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js +var require_UserDelegationKeyCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UserDelegationKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var UserDelegationKeyCredential = class { /** - * Initialize a new instance of the class Service class. - * @param client Reference to the service client + * Azure Storage account name; readonly. */ - constructor(client) { - this.client = client; - } + accountName; /** - * Sets properties for a storage account's Blob service endpoint, including properties for Storage - * Analytics and CORS (Cross-Origin Resource Sharing) rules - * @param blobServiceProperties The StorageService properties. - * @param options The options parameters. + * Azure Storage user delegation key; readonly. */ - setProperties(blobServiceProperties2, options) { - return this.client.sendOperationRequest({ blobServiceProperties: blobServiceProperties2, options }, setPropertiesOperationSpec); - } + userDelegationKey; /** - * gets the properties of a storage account's Blob service, including properties for Storage Analytics - * and CORS (Cross-Origin Resource Sharing) rules. - * @param options The options parameters. + * Key value in Buffer type. */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); - } + key; /** - * Retrieves statistics related to replication for the Blob service. It is only available on the - * secondary location endpoint when read-access geo-redundant replication is enabled for the storage - * account. - * @param options The options parameters. + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - */ - getStatistics(options) { - return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } /** - * The List Containers Segment operation returns a list of the containers under the specified account - * @param options The options parameters. + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - */ - listContainersSegment(options) { - return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } + }; + exports2.UserDelegationKeyCredential = UserDelegationKeyCredential; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js +var require_SasIPRange = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ipRangeToString = ipRangeToString; + function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js +var require_SASQueryParameters = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.SASQueryParameters = exports2.SASProtocol = void 0; + var SasIPRange_js_1 = require_SasIPRange(); + var utils_common_js_1 = require_utils_common(); + var SASProtocol; + (function(SASProtocol2) { + SASProtocol2["Https"] = "https"; + SASProtocol2["HttpsAndHttp"] = "https,http"; + })(SASProtocol || (exports2.SASProtocol = SASProtocol = {})); + var SASQueryParameters = class { /** - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * @param keyInfo Key information - * @param options The options parameters. + * The storage API version. */ - getUserDelegationKey(keyInfo2, options) { - return this.client.sendOperationRequest({ keyInfo: keyInfo2, options }, getUserDelegationKeyOperationSpec); - } + version; /** - * Returns the sku name and account kind - * @param options The options parameters. + * Optional. The allowed HTTP protocol(s). */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); - } + protocol; /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. + * Optional. The start time for this SAS token. */ - submitBatch(contentLength2, multipartContentType2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, multipartContentType: multipartContentType2, body: body2, options }, submitBatchOperationSpec$1); - } + startsOn; /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a - * given search expression. Filter blobs searches across all containers within a storage account but - * can be scoped within the expression to a single container. - * @param options The options parameters. + * Optional only when identifier is provided. The expiry time for this SAS token. 
*/ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); - } - }; - var xmlSerializer$5 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var setPropertiesOperationSpec = { - path: "/", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: ServiceSetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders - } - }, - requestBody: blobServiceProperties, - queryParameters: [ - restype, - comp, - timeoutInSeconds - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 - }; - var getPropertiesOperationSpec$2 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetPropertiesExceptionHeaders - } - }, - queryParameters: [ - restype, - comp, - timeoutInSeconds - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 - }; - var getStatisticsOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders - } - }, - queryParameters: [ - restype, - timeoutInSeconds, - comp1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 - }; - var listContainersSegmentOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceListContainersSegmentExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - include - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 - }; - var getUserDelegationKeyOperationSpec = { - path: "/", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders - } - }, - requestBody: keyInfo, - queryParameters: [ - restype, - timeoutInSeconds, - comp3 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 - }; - var getAccountInfoOperationSpec$2 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ServiceGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders + expiresOn; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. 
+ */ + services; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://learn.microsoft.com/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://learn.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource; + /** + * The signature for the SAS token. + */ + signature; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType; + /** + * Inner value of getter ipRange. + */ + ipRangeInner; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. + */ + signedOid; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + signedTenantId; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + signedStartsOn; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + signedExpiresOn; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + signedService; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + signedVersion; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId; + /** + * Optional. IP range allowed for this SAS. 
+ * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; } - }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 - }; - var submitBatchOperationSpec$1 = { - path: "/", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ServiceSubmitBatchHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders + return void 0; + } + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + 
this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } } - }, - requestBody: body, - queryParameters: [timeoutInSeconds, comp4], - urlParameters: [url], - headerParameters: [ - accept, - version, - requestId, - contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 - }; - var filterBlobsOperationSpec$1 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + // Signed object ID + "sktid", + // Signed tenant ID + "skt", + // Signed key start time + "ske", + // Signed key expiry time + "sks", + // Signed key service + "skv", + // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid" + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.startsOn, false) : void 0); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.expiresOn, false) : void 0); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(this.ipRange) : void 0); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.signedStartsOn, false) : void 0); + break; + case "ske": + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(this.signedExpiresOn, false) : void 0); + break; + case "sks": + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 - }; - var ContainerImpl = class { - /** - * Initialize a new instance of the class Container class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * creates a new container under the specified account. If the container with the same name already - * exists, the operation fails - * @param options The options parameters. - */ - create(options) { - return this.client.sendOperationRequest({ options }, createOperationSpec$2); - } - /** - * returns all user-defined metadata and system properties for the specified container. The data - * returned does not include the container's list of blobs - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); - } - /** - * operation marks the specified container for deletion. The container and any blobs contained within - * it are later deleted during garbage collection - * @param options The options parameters. - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); - } - /** - * operation sets one or more user-defined name-value pairs for the specified container. - * @param options The options parameters. - */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); - } - /** - * gets the permissions for the specified container. The permissions indicate whether container data - * may be accessed publicly. - * @param options The options parameters. - */ - getAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); + return queries.join("&"); } /** - * sets the permissions for the specified container. The permissions indicate whether blobs in a - * container may be accessed publicly. - * @param options The options parameters. + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - */ - setAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } } - /** - * Restores a previously-deleted container. - * @param options The options parameters. - */ - restore(options) { - return this.client.sendOperationRequest({ options }, restoreOperationSpec); + }; + exports2.SASQueryParameters = SASQueryParameters; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js +var require_BlobSASSignatureValues = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; + exports2.generateBlobSASQueryParametersInternal = generateBlobSASQueryParametersInternal; + var BlobSASPermissions_js_1 = require_BlobSASPermissions(); + var ContainerSASPermissions_js_1 = require_ContainerSASPermissions(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var UserDelegationKeyCredential_js_1 = require_UserDelegationKeyCredential(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; + } + function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; + let userDelegationKeyCredential; + if (sharedKeyCredential === void 0 && accountName !== void 0) { + userDelegationKeyCredential = new UserDelegationKeyCredential_js_1.UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } - /** - * Renames an existing container. - * @param sourceContainerName Required. Specifies the name of the container to rename. - * @param options The options parameters. - */ - rename(sourceContainerName2, options) { - return this.client.sendOperationRequest({ sourceContainerName: sourceContainerName2, options }, renameOperationSpec); + if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. 
- */ - submitBatch(contentLength2, multipartContentType2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, multipartContentType: multipartContentType2, body: body2, options }, submitBatchOperationSpec); + if (version >= "2020-12-06") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2025-07-05") { + return generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } } - /** - * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given - * search expression. Filter blobs searches within the given container. - * @param options The options parameters. - */ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + if (version >= "2018-11-09") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. - */ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); + if (version >= "2015-04-05") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, releaseLeaseOperationSpec$1); + throw new RangeError("'version' must be >= '2015-04-05'."); + } + function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. 
- */ - renewLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, renewLeaseOperationSpec$1); + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. - */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId2, proposedLeaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, proposedLeaseId: proposedLeaseId2, options }, changeLeaseOperationSpec$1); + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign + }; + } + function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param options The options parameters. - */ - listBlobFlatSegment(options) { - return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix - * element in the response body that acts as a placeholder for all blobs whose names begin with the - * same substring up to the appearance of the delimiter character. The delimiter may be a single - * character or a string. - * @param options The options parameters. - */ - listBlobHierarchySegment(delimiter2, options) { - return this.client.sendOperationRequest({ delimiter: delimiter2, options }, listBlobHierarchySegmentOperationSpec); + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? 
(0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign + }; + } + function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - }; - var xmlSerializer$4 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec$2 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - access3, - defaultEncryptionScope, - preventEncryptionScopeOverride - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var getPropertiesOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var deleteOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: ContainerDeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var setMetadataOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerSetMetadataHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: 
ContainerSetMetadataExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp6 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var getAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "SignedIdentifier" } - } - }, - serializedName: "SignedIdentifiers", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" - }, - headersMapper: ContainerGetAccessPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp7 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var setAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerSetAccessPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders - } - }, - requestBody: containerAcl, - queryParameters: [ - timeoutInSeconds, - restype2, - comp7 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - access3, - leaseId, - ifModifiedSince, - ifUnmodifiedSince - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 - }; - var restoreOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerRestoreHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp8 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - deletedContainerName, - deletedContainerVersion - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var renameOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerRenameHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRenameExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp9 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - sourceContainerName, - sourceLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var submitBatchOperationSpec = { - path: "/{containerName}", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ContainerSubmitBatchHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders - } - }, - requestBody: body, - queryParameters: [ - timeoutInSeconds, - comp4, - restype2 - ], - urlParameters: [url], - headerParameters: [ - accept, - version, - requestId, - contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 - }; - var filterBlobsOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders - }, - 
default: { - bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where, - restype2 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var acquireLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerAcquireLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var releaseLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerReleaseLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var renewLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerRenewLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var breakLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: ContainerBreakLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var changeLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerChangeLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = 
ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var listBlobFlatSegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var listBlobHierarchySegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1, - delimiter - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var getAccountInfoOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var BlobImpl = class { - /** - * Initialize a new instance of the class Blob class. 
- * @param client Reference to the service client - */ - constructor(client) { - this.client = client; } - /** - * The Download operation reads or downloads a blob from the system, including its metadata and - * properties. You can also call Download to read a snapshot. - * @param options The options parameters. - */ - download(options) { - return this.client.sendOperationRequest({ options }, downloadOperationSpec); + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } - /** - * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system - * properties for the blob. It does not return the content of the blob. - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + void 0, + // SignedKeyDelegatedUserTenantId, will be added in a future release. + void 0, + // SignedDelegatedUserObjectId, will be added in future release. + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function getCanonicalName(accountName, containerName, blobName) { + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } - /** - * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - * permanently removed from the storage account. If the storage account's soft delete feature is - * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - * immediately. However, the blob service retains the blob or snapshot for the number of days specified - * by the DeleteRetentionPolicy section of [Storage service properties] - * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is - * permanently removed from the storage account. Note that you continue to be charged for the - * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the - * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You - * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a - * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 - * (ResourceNotFound). - * @param options The options parameters. - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec); + return elements.join(""); + } + function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } - /** - * Undelete a blob that was previously soft deleted - * @param options The options parameters. - */ - undelete(options) { - return this.client.sendOperationRequest({ options }, undeleteOperationSpec); + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); } - /** - * Sets the time a blob will expire and be deleted. - * @param expiryOptions Required. Indicates mode of the expiry time - * @param options The options parameters. 
- */ - setExpiry(expiryOptions2, options) { - return this.client.sendOperationRequest({ expiryOptions: expiryOptions2, options }, setExpiryOperationSpec); + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); } - /** - * The Set HTTP Headers operation sets system properties on the blob - * @param options The options parameters. - */ - setHttpHeaders(options) { - return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); } - /** - * The Set Immutability Policy operation sets the immutability policy on the blob - * @param options The options parameters. - */ - setImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - /** - * The Delete Immutability Policy operation deletes the immutability policy on the blob - * @param options The options parameters. - */ - deleteImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } - /** - * The Set Legal Hold operation sets a legal hold on the blob. - * @param legalHold Specified if a legal hold should be set on the blob. - * @param options The options parameters. - */ - setLegalHold(legalHold2, options) { - return this.client.sendOperationRequest({ legalHold: legalHold2, options }, setLegalHoldOperationSpec); + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); } - /** - * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more - * name-value pairs - * @param options The options parameters. - */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); + if (version < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. 
- */ - releaseLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, releaseLeaseOperationSpec); + if (version < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - renewLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, renewLeaseOperationSpec); + if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId2, proposedLeaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, proposedLeaseId: proposedLeaseId2, options }, changeLeaseOperationSpec); + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js +var require_BlobLeaseClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobLeaseClient = void 0; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobLeaseClient = class { + _leaseId; + _url; + _containerOrBlobOperation; + _isContainer; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. + * Gets the lease Id. + * + * @readonly */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); + get leaseId() { + return this._leaseId; } /** - * The Create Snapshot operation creates a read-only snapshot of a blob - * @param options The options parameters. + * Gets the url. + * + * @readonly */ - createSnapshot(options) { - return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); + get url() { + return this._url; } /** - * The Start Copy From URL operation copies a blob or an internet resource to a new blob. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. */ - startCopyFromURL(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, startCopyFromURLOperationSpec); + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === void 0) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; + } else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; + } + if (!leaseId) { + leaseId = (0, core_util_1.randomUUID)(); + } + this._leaseId = leaseId; } /** - * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return - * a response until the copy is complete. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. */ - copyFromURL(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, copyFromURLOperationSpec); + async acquireLease(duration, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination - * blob with zero length and full metadata. - * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob - * operation. - * @param options The options parameters. + * To change the ID of the lease. 
+ * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. */ - abortCopyFromURL(copyId2, options) { - return this.client.sendOperationRequest({ copyId: copyId2, options }, abortCopyFromURLOperationSpec); + async changeLease(proposedLeaseId, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + this._leaseId = proposedLeaseId; + return response; + }); } /** - * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant storage only). A - * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block - * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's - * ETag. - * @param tier Indicates the tier to be set on the blob. - * @param options The options parameters. + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. */ - setTier(tier2, options) { - return this.client.sendOperationRequest({ tier: tier2, options }, setTierOperationSpec); + async releaseLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Returns the sku name and account kind - * @param options The options parameters. + * To renew the lease. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + async renewLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + }); + }); } /** - * The Query operation enables users to select/project on blob data by providing simple query - * expressions. - * @param options The options parameters. + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. */ - query(options) { - return this.client.sendOperationRequest({ options }, queryOperationSpec); + async breakLease(breakPeriod, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + }; + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } + }; + exports2.BlobLeaseClient = BlobLeaseClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js +var require_RetriableReadableStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RetriableReadableStream = void 0; + var abort_controller_1 = require_commonjs3(); + var node_stream_1 = require("node:stream"); + var RetriableReadableStream = class extends node_stream_1.Readable { + start; + offset; + end; + getter; + source; + retries = 0; + maxRetryRequests; + onProgress; + options; /** - * The Get Tags operation enables users to get the tags associated with a blob. - * @param options The options parameters. + * Creates an instance of RetriableReadableStream. + * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - */ - getTags(options) { - return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); } - /** - * The Set Tags operation enables users to set tags on a blob. - * @param options The options parameters. 
- */ - setTags(options) { - return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + _read() { + this.source.resume(); } - }; - var xmlSerializer$3 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var downloadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - rangeGetContentMD5, - rangeGetContentCRC64, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var getPropertiesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "HEAD", - responses: { - 200: { - headersMapper: BlobGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var deleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: BlobDeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - blobDeleteType - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - deleteSnapshots - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var undeleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobUndeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp8], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setExpiryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetExpiryHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp11], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - expiryOptions, - expiresOn - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setHttpHeadersOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetHttpHeadersHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: 
[url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp12 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifUnmodifiedSince, - immutabilityPolicyExpiry, - immutabilityPolicyMode - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var deleteImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp12 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setLegalHoldOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetLegalHoldHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp13 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - legalHold - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setMetadataOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetMetadataHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp6], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var acquireLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobAcquireLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var releaseLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobReleaseLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1, 
- ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var renewLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobRenewLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var changeLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobChangeLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var breakLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobBreakLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var createSnapshotOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobCreateSnapshotHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp14], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var startCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobStartCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - tier, - rehydratePriority, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sealBlob, - legalHold1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var copyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, 
- ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - copySource, - blobTagsString, - legalHold1, - xMsRequiresSync, - sourceContentMD5, - copySourceAuthorization, - copySourceTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var abortCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobAbortCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - comp15, - copyId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - copyActionAbortConstant - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setTierOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetTierHeaders - }, - 202: { - headersMapper: BlobSetTierHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + sourceDataHandler = (data2) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = void 0; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp16 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags, - rehydratePriority, - tier1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var getAccountInfoOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: BlobGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders + this.offset += data2.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); } - }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var queryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders + if (!this.push(data2)) { + this.source.pause(); } - }, - requestBody: queryRequest, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp17 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - 
leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 - }; - var getTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders + }; + sourceAbortedHandler = () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobSetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTagsExceptionHeaders + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } else if (this.offset <= this.end) { + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset).then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }).catch((error2) => { + this.destroy(error2); + }); + } else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); } - }, - requestBody: tags, - queryParameters: [ - timeoutInSeconds, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - leaseId, - ifTags, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 + }; + _destroy(error2, callback) { + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error2 === null ? void 0 : error2); + } }; - var PageBlobImpl = class { + exports2.RetriableReadableStream = RetriableReadableStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js +var require_BlobDownloadResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobDownloadResponse = void 0; + var core_util_1 = require_commonjs4(); + var RetriableReadableStream_js_1 = require_RetriableReadableStream(); + var BlobDownloadResponse = class { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } /** - * Initialize a new instance of the class PageBlob class. 
- * @param client Reference to the service client + * Returns if it was previously specified + * for the file. + * + * @readonly */ - constructor(client) { - this.client = client; + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * The Create operation creates a new page blob. - * @param contentLength The length of the request. - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly */ - create(contentLength2, blobContentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, blobContentLength: blobContentLength2, options }, createOperationSpec$1); + get contentDisposition() { + return this.originalResponse.contentDisposition; } /** - * The Upload Pages operation writes a range of pages to a page blob - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly */ - uploadPages(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, uploadPagesOperationSpec); + get contentEncoding() { + return this.originalResponse.contentEncoding; } /** - * The Clear Pages operation clears a set of pages from a page blob - * @param contentLength The length of the request. - * @param options The options parameters. + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly */ - clearPages(contentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, options }, clearPagesOperationSpec); + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a - * URL - * @param sourceUrl Specify a URL to the copy source. - * @param sourceRange Bytes of source data in the specified range. The length of this range should - * match the ContentLength header and x-ms-range/Range destination range header. - * @param contentLength The length of the request. - * @param range The range of bytes to which the source range would be written. The range should be 512 - * aligned and range-end is required. - * @param options The options parameters. + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly */ - uploadPagesFromURL(sourceUrl2, sourceRange2, contentLength2, range2, options) { - return this.client.sendOperationRequest({ sourceUrl: sourceUrl2, sourceRange: sourceRange2, contentLength: contentLength2, range: range2, options }, uploadPagesFromURLOperationSpec); + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a - * page blob - * @param options The options parameters. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. 
+ * + * @readonly */ - getPageRanges(options) { - return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); + get blobType() { + return this.originalResponse.blobType; } /** - * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were - * changed between target blob and previous snapshot. - * @param options The options parameters. + * The number of bytes present in the + * response body. + * + * @readonly */ - getPageRangesDiff(options) { - return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); + get contentLength() { + return this.originalResponse.contentLength; } /** - * Resize the Blob - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly */ - resize(blobContentLength2, options) { - return this.client.sendOperationRequest({ blobContentLength: blobContentLength2, options }, resizeOperationSpec); + get contentMD5() { + return this.originalResponse.contentMD5; } /** - * Update the sequence number of the blob - * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. - * This property applies to page blobs only. This property indicates how the service should modify the - * blob's sequence number - * @param options The options parameters. + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly */ - updateSequenceNumber(sequenceNumberAction2, options) { - return this.client.sendOperationRequest({ sequenceNumberAction: sequenceNumberAction2, options }, updateSequenceNumberOperationSpec); + get contentRange() { + return this.originalResponse.contentRange; } /** - * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. - * The snapshot is copied such that only the differential changes between the previously copied - * snapshot are transferred to the destination. The copied snapshots are complete copies of the - * original snapshot and can be read or copied from as usual. This API is supported since REST version - * 2016-05-31. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' + * + * @readonly */ - copyIncremental(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, copyIncrementalOperationSpec); + get contentType() { + return this.originalResponse.contentType; } - }; - var xmlSerializer$2 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec$1 = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - blobType, - blobContentLength, - blobSequenceNumber - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer$2 - }; - var clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobClearPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - pageWrite1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - 
ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - sourceUrl, - sourceRange, - sourceContentCrc64, - range1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20, - prevsnapshot - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags, - prevSnapshotUrl - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobResizeHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - blobContentLength - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobSequenceNumber, - sequenceNumberAction - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: PageBlobCopyIncrementalHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp21], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - copySource - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var AppendBlobImpl = class { /** - * Initialize a new instance of the class AppendBlob class. 
- * @param client Reference to the service client + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly */ - constructor(client) { - this.client = client; + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; } /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly */ - create(contentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, options }, createOperationSpec); + get copyId() { + return this.originalResponse.copyId; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob. The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly */ - appendBlock(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, appendBlockOperationSpec); + get copyProgress() { + return this.originalResponse.copyProgress; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly */ - appendBlockFromUrl(sourceUrl2, contentLength2, options) { - return this.client.sendOperationRequest({ sourceUrl: sourceUrl2, contentLength: contentLength2, options }, appendBlockFromUrlOperationSpec); + get copySource() { + return this.originalResponse.copySource; } /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly */ - seal(options) { - return this.client.sendOperationRequest({ options }, sealOperationSpec); + get copyStatus() { + return this.originalResponse.copyStatus; } - }; - var xmlSerializer$1 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - blobTagsString, - legalHold1, - blobType1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var appendBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - maxSize, - appendPosition - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer$1 - }; - var appendBlockFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - transactionalContentMD5, - sourceUrl, - sourceContentCrc64, - maxSize, - appendPosition, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var sealOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: AppendBlobSealHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp23], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - appendPosition - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var BlockBlobImpl = class { /** - * Initialize a new instance of the class 
BlockBlob class. - * @param client Reference to the service client + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly */ - constructor(client) { - this.client = client; + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; } /** - * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing - * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put - * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a - * partial update of the content of a block blob, use the Put Block List operation. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly */ - upload(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, uploadOperationSpec); + get leaseDuration() { + return this.originalResponse.leaseDuration; } /** - * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read - * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are - * not supported with Put Blob from URL; the content of an existing blob is overwritten with the - * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, - * use the Put Block from URL API in conjunction with Put Block List. - * @param contentLength The length of the request. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. 
+ * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly */ - putBlobFromUrl(contentLength2, copySource2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, copySource: copySource2, options }, putBlobFromUrlOperationSpec); + get blobContentMD5() { + return this.originalResponse.blobContentMD5; } /** - * The Stage Block operation creates a new block to be committed as part of a blob - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly */ - stageBlock(blockId2, contentLength2, body2, options) { - return this.client.sendOperationRequest({ blockId: blockId2, contentLength: contentLength2, body: body2, options }, stageBlockOperationSpec); + get lastModified() { + return this.originalResponse.lastModified; } /** - * The Stage Block operation creates a new block to be committed as part of a blob where the contents - * are read from a URL. - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param sourceUrl Specify a URL to the copy source. - * @param options The options parameters. + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly */ - stageBlockFromURL(blockId2, contentLength2, sourceUrl2, options) { - return this.client.sendOperationRequest({ blockId: blockId2, contentLength: contentLength2, sourceUrl: sourceUrl2, options }, stageBlockFromURLOperationSpec); + get lastAccessed() { + return this.originalResponse.lastAccessed; } /** - * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the - * blob. In order to be written as part of a blob, a block must have been successfully written to the - * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading - * only those blocks that have changed, then committing the new and existing blocks together. 
You can - * do this by specifying whether to commit a block from the committed block list or from the - * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list - * it may belong to. - * @param blocks Blob Blocks. - * @param options The options parameters. + * Returns the date and time the blob was created. + * + * @readonly */ - commitBlockList(blocks2, options) { - return this.client.sendOperationRequest({ blocks: blocks2, options }, commitBlockListOperationSpec); + get createdOn() { + return this.originalResponse.createdOn; } /** - * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block - * blob - * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted - * blocks, or both lists together. - * @param options The options parameters. + * A name-value pair + * to associate with a file storage object. + * + * @readonly */ - getBlockList(listType2, options) { - return this.client.sendOperationRequest({ listType: listType2, options }, getBlockListOperationSpec); + get metadata() { + return this.originalResponse.metadata; } - }; - var xmlSerializer = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var uploadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobUploadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - blobType2 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer - }; - var putBlobFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sourceContentMD5, - copySourceAuthorization, - copySourceTags, - transactionalContentMD5, - blobType2, - copySourceBlobProperties - ], - isXML: true, - serializer: xmlSerializer - }; - var stageBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockHeaders - }, - default: { - bodyMapper: 
StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer - }; - var stageBlockFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - sourceUrl, - sourceContentCrc64, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer - }; - var commitBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobCommitBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders - } - }, - requestBody: blocks, - queryParameters: [timeoutInSeconds, comp25], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer - }; - var getBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlockList, - headersMapper: BlockBlobGetBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp25, - listType - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer - }; - var StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { /** - * Initializes a new instance of the StorageClient class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. 
+ * + * @readonly */ - constructor(url2, options) { - var _a4, _b; - if (url2 === void 0) { - throw new Error("'url' cannot be null"); - } - if (!options) { - options = {}; - } - const defaults = { - requestContentType: "application/json; charset=utf-8" - }; - const packageDetails = `azsdk-js-azure-storage-blob/12.27.0`; - const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { - userAgentPrefix - }, endpoint: (_b = (_a4 = options.endpoint) !== null && _a4 !== void 0 ? _a4 : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" }); - super(optionsWithDefaults); - this.url = url2; - this.version = options.version || "2025-05-05"; - this.service = new ServiceImpl(this); - this.container = new ContainerImpl(this); - this.blob = new BlobImpl(this); - this.pageBlob = new PageBlobImpl(this); - this.appendBlob = new AppendBlobImpl(this); - this.blockBlob = new BlockBlobImpl(this); - } - }; - var StorageContextClient = class extends StorageClient$1 { - async sendOperationRequest(operationArguments, operationSpec) { - const operationSpecToSend = Object.assign({}, operationSpec); - if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { - operationSpecToSend.path = ""; - } - return super.sendOperationRequest(operationArguments, operationSpecToSend); + get requestId() { + return this.originalResponse.requestId; } - }; - var StorageClient = class { /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly */ - constructor(url2, pipeline) { - this.url = escapeURLPath(url2); - this.accountName = getAccountNameFromUrl(url2); - this.pipeline = pipeline; - this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); - this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = getCredentialFromPipeline(pipeline); - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = void 0; - } - }; - var tracingClient = coreTracing.createTracingClient({ - packageName: "@azure/storage-blob", - packageVersion: SDK_VERSION, - namespace: "Microsoft.Storage" - }); - var BlobSASPermissions = class _BlobSASPermissions { - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; + get clientRequestId() { + return this.originalResponse.clientRequestId; } /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * Indicates the version of the Blob service used + * to execute the request. 
* - * @param permissions - + * @readonly */ - static parse(permissions) { - const blobSASPermissions = new _BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } - } - return blobSASPermissions; + get version() { + return this.originalResponse.version; } /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * Indicates the versionId of the downloaded blob version. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const blobSASPermissions = new _BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; - } - if (permissionLike.add) { - blobSASPermissions.add = true; - } - if (permissionLike.create) { - blobSASPermissions.create = true; - } - if (permissionLike.write) { - blobSASPermissions.write = true; - } - if (permissionLike.delete) { - blobSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - blobSASPermissions.tag = true; - } - if (permissionLike.move) { - blobSASPermissions.move = true; - } - if (permissionLike.execute) { - blobSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; - } - return blobSASPermissions; + get versionId() { + return this.originalResponse.versionId; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * Indicates whether version of this blob is a current version. 
* - * @returns A string which represents the BlobSASPermissions + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); - } - }; - var ContainerSASPermissions = class _ContainerSASPermissions { - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; - this.filterByTags = false; + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; } /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. * - * @param permissions - + * @readonly */ - static parse(permissions) { - const containerSASPermissions = new _ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. - * - * @param permissionLike - + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) */ - static from(permissionLike) { - const containerSASPermissions = new _ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + get contentCrc64() { + return this.originalResponse.contentCrc64; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. - * - * The order of the characters should be as specified here to ensure correctness. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * Object Replication Policy Id of the destination blob. * + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.list) { - permissions.push("l"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - if (this.filterByTags) { - permissions.push("f"); - } - return permissions.join(""); + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; } - }; - var UserDelegationKeyCredential = class { /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * If this blob has been sealed. 
* - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto2.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + get isSealed() { + return this.originalResponse.isSealed; } - }; - function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; - } - exports2.SASProtocol = void 0; - (function(SASProtocol) { - SASProtocol["Https"] = "https"; - SASProtocol["HttpsAndHttp"] = "https,http"; - })(exports2.SASProtocol || (exports2.SASProtocol = {})); - var SASQueryParameters = class { /** - * Optional. IP range allowed for this SAS. + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. * * @readonly */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; - } - return void 0; - } - constructor(version2, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { - this.version = version2; - this.signature = signature; - if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn2; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = encryptionScope2; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType2; - if (userDelegationKey) { - this.signedOid = userDelegationKey.signedObjectId; - this.signedTenantId = 
userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } - } + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; } /** - * Encodes all SAS query parameters into a string that can be appended to a URL. + * Indicates immutability policy mode. * + * @readonly */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - // Signed object ID - "sktid", - // Signed tenant ID - "skt", - // Signed key start time - "ske", - // Signed key expiry time - "sks", - // Signed key service - "skv", - // Signed key version - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid" - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : void 0); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : void 0); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : void 0); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : void 0); - break; - case "ske": - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : void 0); - break; - case "sks": - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } - } - return queries.join("&"); + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; } /** - * A private helper method used to filter and append query key/value pairs into an array. + * Indicates if a legal hold is present on the blob. * - * @param queries - - * @param key - - * @param value - + * @readonly */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; - } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); - } - } - }; - function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; - } - function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version2 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? 
sharedKeyCredentialOrUserDelegationKey : void 0; - let userDelegationKeyCredential; - if (sharedKeyCredential === void 0 && accountName !== void 0) { - userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); - } - if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); - } - if (version2 >= "2020-12-06") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); - } - } - if (version2 >= "2018-11-09") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); - } else { - if (version2 >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); - } - } - } - if (version2 >= "2015-04-05") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); - } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); - } - } - throw new RangeError("'version' must be >= '2015-04-05'."); - } - function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + get legalHold() { + return this.originalResponse.legalHold; } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + originalResponse; + blobDownloadStream; + /** + * Creates an instance of BlobDownloadResponse. 
+ * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream_js_1.RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + }; + exports2.BlobDownloadResponse = BlobDownloadResponse; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js +var require_AvroConstants = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AVRO_SCHEMA_KEY = exports2.AVRO_CODEC_KEY = exports2.AVRO_INIT_BYTES = exports2.AVRO_SYNC_MARKER_SIZE = void 0; + exports2.AVRO_SYNC_MARKER_SIZE = 16; + exports2.AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); + exports2.AVRO_CODEC_KEY = "avro.codec"; + exports2.AVRO_SCHEMA_KEY = "avro.schema"; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js +var require_AvroParser = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroType = exports2.AvroParser = void 0; + var AvroParser = class _AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); } + return bytes; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await _AvroParser.readByte(stream, options); + haveMoreByte = byte & 128; + zigZagEncoded |= (byte & 127) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); + if (haveMoreByte) { + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; + do { + byte = await _AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 127) * significanceInFloat; + significanceInFloat *= 128; + } while (byte & 128); + const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; } + return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + static async readLong(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + static async readInt(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + static async readNull() { + return null; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readBoolean(stream, options = {}) { + const b = await _AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } else if (b === 0) { + return false; } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + throw new Error("Byte was not a boolean."); } } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + static async readFloat(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + static async readDouble(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readBytes(stream, options = {}) { + const size = await _AvroParser.readLong(stream, options); + if (size < 
0) { + throw new Error("Bytes size was negative."); } + return stream.read(size, { abortSignal: options.abortSignal }); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + static async readString(stream, options = {}) { + const u8arr = await _AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await _AvroParser.readString(stream, options); + const value = await readItemMethod(stream, options); + return { key, 
value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return _AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await _AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; } + return dict; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await _AvroParser.readLong(stream, options); count !== 0; count = await _AvroParser.readLong(stream, options)) { + if (count < 0) { + await _AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } + }; + exports2.AvroParser = AvroParser; + var AvroComplex; + (function(AvroComplex2) { + AvroComplex2["RECORD"] = "record"; + AvroComplex2["ENUM"] = "enum"; + AvroComplex2["ARRAY"] = "array"; + AvroComplex2["MAP"] = "map"; + AvroComplex2["UNION"] = "union"; + AvroComplex2["FIXED"] = "fixed"; + })(AvroComplex || (AvroComplex = {})); + var AvroPrimitive; + (function(AvroPrimitive2) { + AvroPrimitive2["NULL"] = "null"; + AvroPrimitive2["BOOLEAN"] = "boolean"; + AvroPrimitive2["INT"] = "int"; + AvroPrimitive2["LONG"] = "long"; + AvroPrimitive2["FLOAT"] = "float"; + AvroPrimitive2["DOUBLE"] = "double"; + AvroPrimitive2["BYTES"] = "bytes"; + AvroPrimitive2["STRING"] = "string"; + })(AvroPrimitive || (AvroPrimitive = {})); + var AvroType = class _AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema) { + if (typeof schema === "string") { + return _AvroType.fromStringSchema(schema); + } else if (Array.isArray(schema)) { + return _AvroType.fromArraySchema(schema); } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + return _AvroType.fromObjectSchema(schema); } } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function getCanonicalName(accountName, containerName, blobName) { - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } } - return elements.join(""); - } - function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version2 = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version2 < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(_AvroType.fromSchema)); } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + static fromObjectSchema(schema) { + const type = schema.type; + try { + return _AvroType.fromStringSchema(type); + } catch { + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = _AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(_AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: + // Unused today + case AvroComplex.FIXED: + // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } } - if (blobSASSignatureValues.versionId && version2 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + }; + exports2.AvroType = AvroType; + var AvroPrimitiveType = class extends AvroType { + _primitive; + constructor(primitive) { + super(); + this._primitive = primitive; } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version2 < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + }; + var AvroEnumType = class extends AvroType { + _symbols; + constructor(symbols) { + super(); + this._symbols = symbols; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version2 < "2019-10-10") { 
- throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version2 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + }; + var AvroUnionType = class extends AvroType { + _types; + constructor(types) { + super(); + this._types = types; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version2 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); } - if (version2 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + }; + var AvroMapType = class extends AvroType { + _itemType; + constructor(itemType) { + super(); + this._itemType = itemType; } - if (version2 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { - throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); } - if (version2 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + }; + var AvroRecordType = class extends AvroType { + _name; + _fields; + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; } - if (blobSASSignatureValues.encryptionScope && version2 < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; } - blobSASSignatureValues.version = version2; - return blobSASSignatureValues; + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js +var require_utils_common3 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.arraysEqual = arraysEqual; + function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; } - var BlobLeaseClient = class { - /** - * Gets the lease Id. 
- * - * @readonly - */ - get leaseId() { - return this._leaseId; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js +var require_AvroReader = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReader = void 0; + var AvroConstants_js_1 = require_AvroConstants(); + var AvroParser_js_1 = require_AvroParser(); + var utils_common_js_1 = require_utils_common3(); + var AvroReader = class { + _dataStream; + _headerStream; + _syncMarker; + _metadata; + _itemType; + _itemsRemainingInBlock; + // Remembers where we started if partial data stream was provided. + _initialBlockOffset; + /// The byte offset within the Avro file (both header and data) + /// of the start of the current block. + _blockOffset; + get blockOffset() { + return this._blockOffset; } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; + _objectIndex; + get objectIndex() { + return this._objectIndex; } - /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. - */ - constructor(client, leaseId2) { - const clientContext = client.storageClientContext; - this._url = client.url; - if (client.name === void 0) { - this._isContainer = true; - this._containerOrBlobOperation = clientContext.container; - } else { - this._isContainer = false; - this._containerOrBlobOperation = clientContext.blob; - } - if (!leaseId2) { - leaseId2 = coreUtil.randomUUID(); - } - this._leaseId = leaseId2; + _initialized; + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; } - /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. - */ - async acquireLease(duration2, options = {}) { - var _a4, _b, _c, _d, _e; - if (this._isContainer && (((_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + async initialize(options = {}) { + const header = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + }); + if (!(0, utils_common_js_1.arraysEqual)(header, AvroConstants_js_1.AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); } - return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { - var _a5; - return assertResponse(await this._containerOrBlobOperation.acquireLease({ - abortSignal: options.abortSignal, - duration: duration2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? void 0 : _a5.tagConditions }), - proposedLeaseId: this._leaseId, - tracingOptions: updatedOptions.tracingOptions - })); + this._metadata = await AvroParser_js_1.AvroParser.readMap(this._headerStream, AvroParser_js_1.AvroParser.readString, { + abortSignal: options.abortSignal }); - } - /** - * To change the ID of the lease. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param proposedLeaseId - the proposed new lease Id. - * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. - */ - async changeLease(proposedLeaseId2, options = {}) { - var _a4, _b, _c, _d, _e; - if (this._isContainer && (((_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + const codec = this._metadata[AvroConstants_js_1.AVRO_CODEC_KEY]; + if (!(codec === void 0 || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); } - return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { - var _a5; - const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId2, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? void 0 : _a5.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - })); - this._leaseId = proposedLeaseId2; - return response; + this._syncMarker = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal }); - } - /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param options - option to configure lease management operations. 
- * @returns Response data for release lease operation. - */ - async releaseLease(options = {}) { - var _a4, _b, _c, _d, _e; - if (this._isContainer && (((_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + const schema = JSON.parse(this._metadata[AvroConstants_js_1.AVRO_SCHEMA_KEY]); + this._itemType = AvroParser_js_1.AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; } - return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { - var _a5; - return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? void 0 : _a5.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - })); + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal }); + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } } - /** - * To renew the lease. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. - */ - async renewLease(options = {}) { - var _a4, _b, _c, _d, _e; - if (this._isContainer && (((_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + async *parseObjects(options = {}) { + if (!this._initialized) { + await this.initialize(options); } - return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { - var _a5; - return this._containerOrBlobOperation.renewLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? void 0 : _a5.tagConditions }), - tracingOptions: updatedOptions.tracingOptions + while (this.hasNext()) { + const result2 = await this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal }); - }); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = await AvroParser_js_1.AvroParser.readFixedBytes(this._dataStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!(0, utils_common_js_1.arraysEqual)(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }); + } catch { + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + } + } + yield result2; + } } - /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. - * @returns Response data for break lease operation. - */ - async breakLease(breakPeriod2, options = {}) { - var _a4, _b, _c, _d, _e; - if (this._isContainer && (((_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + }; + exports2.AvroReader = AvroReader; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js +var require_AvroReadable = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadable = void 0; + var AvroReadable = class { + }; + exports2.AvroReadable = AvroReadable; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js +var require_AvroReadableFromStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadableFromStream = void 0; + var AvroReadable_js_1 = require_AvroReadable(); + var abort_controller_1 = require_commonjs3(); + var buffer_1 = require("buffer"); + var ABORT_ERROR = new abort_controller_1.AbortError("Reading from the avro stream was aborted."); + var AvroReadableFromStream = class extends AvroReadable_js_1.AvroReadable { + _position; + _readable; + toUint8Array(data2) { + if (typeof data2 === "string") { + return buffer_1.Buffer.from(data2); + } + return data2; + } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + if (options.abortSignal?.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + return this.toUint8Array(chunk); + } else { + return new Promise((resolve, reject) => { + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + }); } - return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { - var _a5; - const operationOptions = { - abortSignal: options.abortSignal, - breakPeriod: breakPeriod2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? 
void 0 : _a5.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - }; - return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); - }); } }; - var RetriableReadableStream = class extends stream2.Readable { + exports2.AvroReadableFromStream = AvroReadableFromStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js +var require_internal_avro = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadableFromStream = exports2.AvroReadable = exports2.AvroReader = void 0; + var AvroReader_js_1 = require_AvroReader(); + Object.defineProperty(exports2, "AvroReader", { enumerable: true, get: function() { + return AvroReader_js_1.AvroReader; + } }); + var AvroReadable_js_1 = require_AvroReadable(); + Object.defineProperty(exports2, "AvroReadable", { enumerable: true, get: function() { + return AvroReadable_js_1.AvroReadable; + } }); + var AvroReadableFromStream_js_1 = require_AvroReadableFromStream(); + Object.defineProperty(exports2, "AvroReadableFromStream", { enumerable: true, get: function() { + return AvroReadableFromStream_js_1.AvroReadableFromStream; + } }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js +var require_BlobQuickQueryStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobQuickQueryStream = void 0; + var node_stream_1 = require("node:stream"); + var index_js_1 = require_internal_avro(); + var BlobQuickQueryStream = class extends node_stream_1.Readable { + source; + avroReader; + avroIter; + avroPaused = true; + onProgress; + onError; /** - * Creates an instance of RetriableReadableStream. + * Creates an instance of BlobQuickQueryStream. 
* * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read * @param options - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data2) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = void 0; - this.source.pause(); - this.sourceErrorOrEndHandler(); - this.source.destroy(); - return; - } - this.offset += data2.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); - } - if (!this.push(data2)) { - this.source.pause(); - } - }; - this.sourceAbortedHandler = () => { - const abortError = new abortController.AbortError("The operation was aborted."); - this.destroy(abortError); - }; - this.sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; - } - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } else if (this.offset <= this.end) { - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset).then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }).catch((error2) => { - this.destroy(error2); - }); - } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); - } - }; - this.getter = getter; + constructor(source, options = {}) { + super(); this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); + this.onError = options.onError; + this.avroReader = new index_js_1.AvroReader(new index_js_1.AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - this.source.on("aborted", this.sourceAbortedHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); - this.source.removeListener("aborted", this.sourceAbortedHandler); + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } } - _destroy(error2, callback) { - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error2 === null ? 
void 0 : error2); + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data2 = obj.data; + if (data2 instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data2))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); } }; - var BlobDownloadResponse = class { + exports2.BlobQuickQueryStream = BlobQuickQueryStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js +var require_BlobQueryResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobQueryResponse = void 0; + var core_util_1 = require_commonjs4(); + var BlobQuickQueryStream_js_1 = require_BlobQuickQueryStream(); + var BlobQueryResponse = class { /** * Indicates that the service supports * requests for partial file content. @@ -50900,7 +54162,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @readonly */ get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; + return void 0; } /** * String identifier for the last attempted Copy @@ -51007,14 +54269,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get etag() { return this.originalResponse.etag; } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } /** * The error code. * @@ -51057,23 +54311,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get lastModified() { return this.originalResponse.lastModified; } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. 
- * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * Returns the date and time the blob was created. - * - * @readonly - */ - get createdOn() { - return this.originalResponse.createdOn; - } /** * A name-value pair * to associate with a file storage object. @@ -51084,1166 +54321,1359 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.originalResponse.metadata; } /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; - } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the Blob service used - * to execute the request. - * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * Object Replication Policy Id of the destination blob. - * - * @readonly - */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; - } - /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. - * - * @readonly - */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; - } - /** - * If this blob has been sealed. - * - * @readonly - */ - get isSealed() { - return this.originalResponse.isSealed; - } - /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. - * - * @readonly - */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; - } - /** - * Indicates immutability policy mode. - * - * @readonly - */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; - } - /** - * Indicates if a legal hold is present on the blob. - * - * @readonly - */ - get legalHold() { - return this.originalResponse.legalHold; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly - */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. 
- * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly - */ - get readableStreamBody() { - return coreUtil.isNode ? this.blobDownloadStream : void 0; - } - /** - * The HTTP response. - */ - get _response() { - return this.originalResponse._response; - } - /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - - */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); - } - }; - var AVRO_SYNC_MARKER_SIZE = 16; - var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); - var AVRO_CODEC_KEY = "avro.codec"; - var AVRO_SCHEMA_KEY = "avro.schema"; - var AvroParser = class _AvroParser { - /** - * Reads a fixed number of bytes from the stream. - * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream3, length, options = {}) { - const bytes = await stream3.read(length, { abortSignal: options.abortSignal }); - if (bytes.length !== length) { - throw new Error("Hit stream end."); - } - return bytes; - } - /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - - */ - static async readByte(stream3, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream3, 1, options); - return buf[0]; - } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream3, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await _AvroParser.readByte(stream3, options); - haveMoreByte = byte & 128; - zigZagEncoded |= (byte & 127) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); - if (haveMoreByte) { - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; - do { - byte = await _AvroParser.readByte(stream3, options); - zigZagEncoded += (byte & 127) * significanceInFloat; - significanceInFloat *= 128; - } while (byte & 128); - const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; - } - return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); - } - static async readLong(stream3, options = {}) { - return _AvroParser.readZigZagLong(stream3, options); - } - static async readInt(stream3, options = {}) { - return _AvroParser.readZigZagLong(stream3, options); - } - static async readNull() { - return null; - } - static async readBoolean(stream3, options = {}) { - const b = await _AvroParser.readByte(stream3, options); - if (b === 1) { - return true; - } else if (b === 0) { - return false; - } else { - throw new Error("Byte was not a boolean."); - } - } - static async readFloat(stream3, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream3, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); - } - static async readDouble(stream3, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream3, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); - } - static async readBytes(stream3, options = {}) { - const size = await _AvroParser.readLong(stream3, options); - if (size < 0) { - throw new Error("Bytes size was negative."); - } - return stream3.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream3, options = {}) { - const u8arr = await _AvroParser.readBytes(stream3, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream3, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream3, options); - const value = await readItemMethod(stream3, options); - return { key, value }; - } - static async readMap(stream3, readItemMethod, options = {}) { - const readPairMethod = (s, opts = {}) => { - return _AvroParser.readMapPair(s, readItemMethod, opts); - }; - const pairs = await _AvroParser.readArray(stream3, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; - } - return dict; - } - static async readArray(stream3, readItemMethod, options = {}) { - const items = []; - for (let count = await _AvroParser.readLong(stream3, options); count !== 0; count = await _AvroParser.readLong(stream3, options)) { - if (count < 0) { - await _AvroParser.readLong(stream3, options); - count = -count; - } - while (count--) { - const item = await readItemMethod(stream3, options); - items.push(item); - } - } - return items; - } - }; - var AvroComplex; - (function(AvroComplex2) { - AvroComplex2["RECORD"] = "record"; - AvroComplex2["ENUM"] = "enum"; - AvroComplex2["ARRAY"] = "array"; - AvroComplex2["MAP"] = "map"; - AvroComplex2["UNION"] = "union"; - AvroComplex2["FIXED"] = "fixed"; - })(AvroComplex || (AvroComplex = {})); - var AvroPrimitive; - (function(AvroPrimitive2) { - AvroPrimitive2["NULL"] = "null"; - AvroPrimitive2["BOOLEAN"] = "boolean"; - AvroPrimitive2["INT"] = "int"; - AvroPrimitive2["LONG"] = "long"; - AvroPrimitive2["FLOAT"] = "float"; - AvroPrimitive2["DOUBLE"] = "double"; - AvroPrimitive2["BYTES"] = "bytes"; - AvroPrimitive2["STRING"] = "string"; - })(AvroPrimitive || (AvroPrimitive = {})); - var AvroType = class _AvroType { - /** - * Determines the AvroType from the Avro Schema. 
- */ - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - static fromSchema(schema) { - if (typeof schema === "string") { - return _AvroType.fromStringSchema(schema); - } else if (Array.isArray(schema)) { - return _AvroType.fromArraySchema(schema); - } else { - return _AvroType.fromObjectSchema(schema); - } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); - } - } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(_AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; - try { - return _AvroType.fromStringSchema(type); - } catch (_a4) { - } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = _AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); - } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); - } - return new AvroMapType(_AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: - // Unused today - case AvroComplex.FIXED: - // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); - } + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; } - }; - var AvroPrimitiveType = class extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream3, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream3, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream3, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream3, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream3, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream3, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream3, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream3, options); - default: - throw new Error("Unknown Avro Primitive"); - } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; } - }; - var AvroEnumType = class extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream3, options = {}) { - const value = await AvroParser.readInt(stream3, options); - return this._symbols[value]; + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; } - }; - var AvroUnionType = class extends AvroType { - constructor(types) { - super(); - this._types = types; + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return void 0; } - async read(stream3, options = {}) { - const typeIndex = await AvroParser.readInt(stream3, options); - return this._types[typeIndex].read(stream3, options); + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; } - }; - var AvroMapType = class extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream3, options = {}) { - const readItemMethod = (s, opts) => { - return this._itemType.read(s, opts); - }; - return AvroParser.readMap(stream3, readItemMethod, options); + originalResponse; + blobDownloadStream; + /** + * Creates an instance of BlobQueryResponse. 
+ * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream_js_1.BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } }; - var AvroRecordType = class extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; + exports2.BlobQueryResponse = BlobQueryResponse; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/models.js +var require_models2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/models.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = void 0; + exports2.toAccessTier = toAccessTier; + exports2.ensureCpkIfSpecified = ensureCpkIfSpecified; + exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; + var constants_js_1 = require_constants9(); + var BlockBlobTier; + (function(BlockBlobTier2) { + BlockBlobTier2["Hot"] = "Hot"; + BlockBlobTier2["Cool"] = "Cool"; + BlockBlobTier2["Cold"] = "Cold"; + BlockBlobTier2["Archive"] = "Archive"; + })(BlockBlobTier || (exports2.BlockBlobTier = BlockBlobTier = {})); + var PremiumPageBlobTier; + (function(PremiumPageBlobTier2) { + PremiumPageBlobTier2["P4"] = "P4"; + PremiumPageBlobTier2["P6"] = "P6"; + PremiumPageBlobTier2["P10"] = "P10"; + PremiumPageBlobTier2["P15"] = "P15"; + PremiumPageBlobTier2["P20"] = "P20"; + PremiumPageBlobTier2["P30"] = "P30"; + PremiumPageBlobTier2["P40"] = "P40"; + PremiumPageBlobTier2["P50"] = "P50"; + PremiumPageBlobTier2["P60"] = "P60"; + PremiumPageBlobTier2["P70"] = "P70"; + PremiumPageBlobTier2["P80"] = "P80"; + })(PremiumPageBlobTier || (exports2.PremiumPageBlobTier = PremiumPageBlobTier = {})); + function toAccessTier(tier) { + if (tier === void 0) { + return void 0; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream3, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream3, options); + return tier; + } + function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = constants_js_1.EncryptionAlgorithmAES25; + } + } + var StorageBlobAudience; + (function(StorageBlobAudience2) { + StorageBlobAudience2["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + StorageBlobAudience2["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; + })(StorageBlobAudience || (exports2.StorageBlobAudience = StorageBlobAudience = {})); + function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js +var require_PageBlobRangeResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeResponseFromModel = rangeResponseFromModel; + function rangeResponseFromModel(response) { + const pageRange = 
(response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + return { + ...response, + pageRange, + clearRange, + _response: { + ...response._response, + parsedBody: { + pageRange, + clearRange } } - return record; + }; + } + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/logger.js +var require_logger2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("core-lro"); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js +var require_constants11 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; + exports2.POLL_INTERVAL_IN_MS = 2e3; + exports2.terminalStates = ["succeeded", "canceled", "failed"]; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js +var require_operation = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; + var logger_js_1 = require_logger2(); + var constants_js_1 = require_constants11(); + function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); } - }; - function arraysEqual(a, b) { - if (a === b) - return true; - if (a == null || b == null) - return false; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; + } + exports2.deserializeState = deserializeState; + function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error2) => { + if (isOperationError(error2)) { + stateProxy.setError(state, error2); + stateProxy.setFailed(state); + } + throw error2; + }; + } + function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; } - return true; + return message + " " + innerMessage; } - var AvroReader = class { - get blockOffset() { - return this._blockOffset; + function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); } - get objectIndex() { - return this._objectIndex; + return { + code, + message + }; + } + function processOperationStatus(result2) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result2; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. 
${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; + } } - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult + })); } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal - }); - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec === void 0 || codec === null || codec === "null")) { - throw new Error("Codecs are not supported"); + } + function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? processResult(response, state) : response; + } + async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; + } + exports2.initOperation = initOperation; + async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status: + Polling from: ${state.config.operationLocation} + Operation status: ${status} + Polling status: ${constants_js_1.terminalStates.includes(status) ? 
"Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== void 0) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status + }; } - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + } + return { response, status }; + } + async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== void 0) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options }); - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset === 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult }); - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); + if (location !== void 0) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } + updateState === null || updateState === void 0 ? 
void 0 : updateState(state, response); } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; + } + exports2.pollOperation = pollOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js +var require_operation2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; + var operation_js_1 = require_operation(); + var logger_js_1 = require_logger2(); + function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; + } + function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; + } + function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; + } + function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; + } + function findResourceLocation(inputs) { + var _a4; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return void 0; + } + case "PATCH": { + return (_a4 = getDefault()) !== null && _a4 !== void 0 ? _a4 : requestPath; + } + default: { + return getDefault(); + } } - parseObjects() { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return void 0; } - while (this.hasNext()) { - const result2 = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker2 = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker2)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - })); - } catch (_a4) { - this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); - } - } - yield yield tslib.__await(result2); + case "original-uri": { + return requestPath; } - }); + case "location": + default: { + return location; + } + } + } + } + function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const 
location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== void 0) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig + }) + }; + } else if (location !== void 0) { + return { + mode: "ResourceLocation", + operationLocation: location + }; + } else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath + }; + } else { + return void 0; + } + } + exports2.inferLroMode = inferLroMode; + function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== void 0) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + } + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { + case void 0: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; + } + } + } + function getStatus(rawResponse) { + var _a4; + const { status } = (_a4 = rawResponse.body) !== null && _a4 !== void 0 ? _a4 : {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function getProvisioningState(rawResponse) { + var _a4, _b; + const { properties, provisioningState } = (_a4 = rawResponse.body) !== null && _a4 !== void 0 ? _a4 : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } else if (statusCode < 300) { + return "succeeded"; + } else { + return "failed"; } - }; - var AvroReadable = class { - }; - var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); - var AvroReadableFromStream = class extends AvroReadable { - toUint8Array(data2) { - if (typeof data2 === "string") { - return Buffer.from(data2); - } - return data2; + } + function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== void 0) { + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; } - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; + return void 0; + } + exports2.parseRetryAfter = parseRetryAfter; + function getErrorFromResponse(response) { + const error2 = accessBodyProperty(response, "error"); + if (!error2) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; } - get position() { - return this._position; + if (!error2.code || !error2.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; } - async read(size, options = {}) { + return error2; + } + exports2.getErrorFromResponse = getErrorFromResponse; + function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return void 0; + } + function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { var _a4; - if ((_a4 = options.abortSignal) === null || _a4 === void 0 ? void 0 : _a4.aborted) { - throw ABORT_ERROR; + const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? void 0 : _a4["mode"]; + switch (mode) { + case void 0: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); + } + const status = helper(); + return status === "running" && operationLocation === void 0 ? "succeeded" : status; + } + exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; + async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + stateProxy, + processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult + }); + } + exports2.initHttpOperation = initHttpOperation; + function getOperationLocation({ rawResponse }, state) { + var _a4; + const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? 
void 0 : _a4["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) + }); } - if (size === 0) { - return new Uint8Array(); + case "ResourceLocation": { + return getLocationHeader(rawResponse); } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); + case "Body": + default: { + return void 0; } - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - return this.toUint8Array(chunk); - } else { - return new Promise((resolve, reject) => { - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const callbackChunk = this._readable.read(size); - if (callbackChunk) { - this._position += callbackChunk.length; - cleanUp(); - resolve(this.toUint8Array(callbackChunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } - }); + } + } + exports2.getOperationLocation = getOperationLocation; + function getOperationStatus({ rawResponse }, state) { + var _a4; + const mode = (_a4 = state.config.metadata) === null || _a4 === void 0 ? void 0 : _a4["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } - }; - var BlobQuickQueryStream = class extends stream2.Readable { + } + exports2.getOperationStatus = getOperationStatus; + function accessBodyProperty({ flatResponse, rawResponse }, prop2) { + var _a4, _b; + return (_a4 = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop2]) !== null && _a4 !== void 0 ? _a4 : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop2]; + } + function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; + } + exports2.getResourceLocation = getResourceLocation; + function isOperationError(e) { + return e.name === "RestError"; + } + exports2.isOperationError = isOperationError; + async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult ? 
({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult + }); + } + exports2.pollHttpOperation = pollHttpOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js +var require_poller = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildCreatePoller = void 0; + var operation_js_1 = require_operation(); + var constants_js_1 = require_constants11(); + var core_util_1 = require_commonjs4(); + var createStateProxy = () => ({ /** - * Creates an instance of BlobQuickQueryStream. - * - * @param source - The current ReadableStream returned from getter - * @param options - + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. */ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); - }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data2 = obj.data; - if (data2 instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data2))) { - this.avroPaused = true; + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => state.status = "canceled", + setError: (state, error2) => state.error = error2, + setResult: (state, result2) => state.result = result2, + setRunning: (state) => state.status = "running", + setSucceeded: (state) => state.status = "succeeded", + setFailed: (state) => state.status = "failed", + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded" + }); + function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs 
= constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() : void 0; + const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = /* @__PURE__ */ new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === void 0, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); } } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } + } finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); + } + })().finally(() => { + resultPromise = void 0; + }), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - this.onError({ - position, - name, - isFatal: fatal, - description - }); + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); + } } - } while (!avroNext.done && !this.avroPaused); - } - }; - var BlobQueryResponse = class { - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. 
- * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; + }; + return poller; + }; + } + exports2.buildCreatePoller = buildCreatePoller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js +var require_poller2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var operation_js_1 = require_operation2(); + var poller_js_1 = require_poller(); + async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + poll: lro.sendPollRequest + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse + }); + } + exports2.createHttpPoller = createHttpPoller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js +var require_operation3 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.GenericPollOperation = void 0; + var operation_js_1 = require_operation2(); + var logger_js_1 = require_logger2(); + var createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => state.isCancelled = true, + setError: (state, error2) => state.error = error2, + setResult: (state, result2) => state.result = result2, + setRunning: (state) => state.isStarted = true, + setSucceeded: (state) => state.isCompleted = true, + setFailed: () => { + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) + }); + var GenericPollOperation = class { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; + async update(options) { + var _a4; + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult + })); + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === void 0) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, + isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult + }); + } + (_a4 = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a4 === void 0 ? void 0 : _a4.call(options, this.state); + return this; } - /** - * Returns the value that was specified - * for the Content-Language request header. 
- * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; } /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly + * Serializes the Poller operation. */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; + toString() { + return JSON.stringify({ + state: this.state + }); } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; + }; + exports2.GenericPollOperation = GenericPollOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js +var require_poller3 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; + var PollerStoppedError = class _PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, _PollerStoppedError.prototype); } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; + }; + exports2.PollerStoppedError = PollerStoppedError; + var PollerCancelledError = class _PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, _PollerCancelledError.prototype); } + }; + exports2.PollerCancelledError = PollerCancelledError; + var Poller = class { /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. 
- * The default content type is 'application/octet-stream' + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. + * const operation = { + * state, + * update, + * cancel, + * toString + * } * - * @readonly - */ - get copyCompletedOn() { - return void 0; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * // Sending the operation to the parent's constructor. + * super(operation); * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * // You can assign more local properties here. + * } + * } + * ``` * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` * - * @readonly + * @param operation - Must contain the basic properties of `PollOperation`. */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + constructor(operation) { + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + this.promise.catch(() => { + }); } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. 
- * - * @readonly + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. */ - get leaseDuration() { - return this.originalResponse.leaseDuration; + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } } /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly + * @param options - Optional properties passed to the operation's update method. */ - get leaseStatus() { - return this.originalResponse.leaseStatus; + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this) + }); + } + this.processUpdatedState(); } /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. + * fireProgress calls the functions passed in via onProgress the method of the poller. * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. * - * @readonly + * @param state - The current operation state. */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } } /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly + * Invokes the underlying operation's cancel method. */ - get etag() { - return this.originalResponse.etag; + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); } /** - * The error code. + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly + * @param options - Optional properties passed to the operation's update method. 
*/ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = void 0; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error2 = new PollerCancelledError("Operation was canceled"); + this.reject(error2); + throw error2; + } + } + if (this.isDone() && this.resolve) { + this.resolve(this.getResult()); + } } /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly + * Returns a promise that will resolve once the underlying operation is completed. */ - get lastModified() { - return this.originalResponse.lastModified; + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + this.processUpdatedState(); + return this.promise; } /** - * A name-value pair - * to associate with a file storage object. + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. * - * @readonly + * It returns a method that can be used to stop receiving updates on the given callback function. */ - get metadata() { - return this.originalResponse.metadata; + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; } /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly + * Returns true if the poller has finished polling. */ - get requestId() { - return this.originalResponse.requestId; + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); } /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly + * Stops the poller from continuing to poll. */ - get clientRequestId() { - return this.originalResponse.clientRequestId; + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } } /** - * Indicates the version of the File service used - * to execute the request. - * - * @readonly + * Returns true if the poller is stopped. */ - get version() { - return this.originalResponse.version; + isStopped() { + return this.stopped; } /** - * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned - * when the blob was encrypted with a customer-provided key. + * Attempts to cancel the underlying operation. * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. */ - get blobBody() { - return void 0; + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; } /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * Returns the state of the operation. * - * It will parse avor data returned by blob query. + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. * - * @readonly + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. */ - get readableStreamBody() { - return coreUtil.isNode ? this.blobDownloadStream : void 0; + getOperationState() { + return this.operation.state; } /** - * The HTTP response. + * Returns the result value of the operation, + * regardless of the state of the poller. 
+ * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. */ - get _response() { - return this.originalResponse._response; + getResult() { + const state = this.operation.state; + return state.result; } /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } - }; - exports2.BlockBlobTier = void 0; - (function(BlockBlobTier) { - BlockBlobTier["Hot"] = "Hot"; - BlockBlobTier["Cool"] = "Cool"; - BlockBlobTier["Cold"] = "Cold"; - BlockBlobTier["Archive"] = "Archive"; - })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); - exports2.PremiumPageBlobTier = void 0; - (function(PremiumPageBlobTier) { - PremiumPageBlobTier["P4"] = "P4"; - PremiumPageBlobTier["P6"] = "P6"; - PremiumPageBlobTier["P10"] = "P10"; - PremiumPageBlobTier["P15"] = "P15"; - PremiumPageBlobTier["P20"] = "P20"; - PremiumPageBlobTier["P30"] = "P30"; - PremiumPageBlobTier["P40"] = "P40"; - PremiumPageBlobTier["P50"] = "P50"; - PremiumPageBlobTier["P60"] = "P60"; - PremiumPageBlobTier["P70"] = "P70"; - PremiumPageBlobTier["P80"] = "P80"; - })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); - function toAccessTier(tier2) { - if (tier2 === void 0) { - return void 0; + toString() { + return this.operation.toString(); } - return tier2; - } - function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + }; + exports2.Poller = Poller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js +var require_lroEngine = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var operation_js_1 = require_operation3(); + var constants_js_1 = require_constants11(); + var poller_js_1 = require_poller3(); + var operation_js_2 = require_operation(); + var LroEngine = class extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; + const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs }; + operation.setPollerConfig(this.config); } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); } - } - exports2.StorageBlobAudience = void 0; - (function(StorageBlobAudience) { - StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; - StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; - })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); - function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; - } - function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { - pageRange, - clearRange, - _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange - } }) - }); - } - var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { + }; + exports2.LroEngine = LroEngine; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js +var require_lroEngine2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var lroEngine_js_1 = require_lroEngine(); + Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { + return lroEngine_js_1.LroEngine; + } }); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js +var require_pollOperation = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/index.js +var require_commonjs12 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var poller_js_1 = require_poller2(); + Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { + return poller_js_1.createHttpPoller; + } }); + tslib_1.__exportStar(require_lroEngine2(), exports2); + tslib_1.__exportStar(require_poller3(), exports2); + tslib_1.__exportStar(require_pollOperation(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js +var require_BlobStartCopyFromUrlPoller = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBeginCopyFromUrlPoller = void 0; + var core_util_1 = require_commonjs4(); + var core_lro_1 = require_commonjs12(); + var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { + intervalInMs; constructor(options) { - const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } 
= options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { + const operation = makeBlobBeginCopyFromURLPollOperation({ + ...state, blobClient, - copySource: copySource2, + copySource, startCopyFromURLOptions - })); + }); super(operation); if (typeof onProgress === "function") { this.onProgress(onProgress); @@ -52251,20 +55681,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.intervalInMs = intervalInMs; } delay() { - return coreUtil.delay(this.intervalInMs); + return (0, core_util_1.delay)(this.intervalInMs); } }; + exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; var cancel = async function cancel2(options = {}) { const state = this.state; - const { copyId: copyId2 } = state; + const { copyId } = state; if (state.isCompleted) { return makeBlobBeginCopyFromURLPollOperation(state); } - if (!copyId2) { + if (!copyId) { state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); } - await state.blobClient.abortCopyFromURL(copyId2, { + await state.blobClient.abortCopyFromURL(copyId, { abortSignal: options.abortSignal }); state.isCancelled = true; @@ -52272,10 +55703,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; var update3 = async function update4(options = {}) { const state = this.state; - const { blobClient, copySource: copySource2, startCopyFromURLOptions } = state; + const { blobClient, copySource, startCopyFromURLOptions } = state; if (!state.isStarted) { state.isStarted = true; - const result2 = await blobClient.startCopyFromURL(copySource2, startCopyFromURLOptions); + const result2 = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); state.copyId = result2.copyId; if (result2.copyStatus === "success") { state.result = result2; @@ -52315,12 +55746,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; function makeBlobBeginCopyFromURLPollOperation(state) { return { - state: Object.assign({}, state), + state: { ...state }, cancel, toString: toString2, update: update3 }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Range.js +var require_Range = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeToString = rangeToString; function rangeToString(iRange) { if (iRange.offset < 0) { throw new RangeError(`Range.offset cannot be smaller than 0.`); @@ -52330,27 +55770,61 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js +var require_Batch = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Batch = void 0; + var events_1 = require("events"); var BatchStates; (function(BatchStates2) { BatchStates2[BatchStates2["Good"] = 0] = "Good"; BatchStates2[BatchStates2["Error"] = 1] = "Error"; })(BatchStates || (BatchStates = {})); var Batch = class { + /** + * Concurrency. Must be lager than 0. + */ + concurrency; + /** + * Number of active operations under execution. + */ + actives = 0; + /** + * Number of completed operations under execution. 
+ */ + completed = 0; + /** + * Offset of next operation to be executed. + */ + offset = 0; + /** + * Operation array to be executed. + */ + operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + state = BatchStates.Good; + /** + * A private emitter used to pass events inside this class. + */ + emitter; /** * Creates an instance of Batch. * @param concurrency - */ constructor(concurrency = 5) { - this.actives = 0; - this.completed = 0; - this.offset = 0; - this.operations = []; - this.state = BatchStates.Good; if (concurrency < 1) { throw new RangeError("concurrency must be larger than 0"); } this.concurrency = concurrency; - this.emitter = new events.EventEmitter(); + this.emitter = new events_1.EventEmitter(); } /** * Add a operation into queue. @@ -52420,333 +55894,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BuffersStream = class extends stream2.Readable { - /** - * Creates an instance of BuffersStream that will emit the data - * contained in the array of buffers. - * - * @param buffers - Array of buffers containing the data - * @param byteLength - The total length of data contained in the buffers - */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; - let buffersLength = 0; - for (const buf of this.buffers) { - buffersLength += buf.byteLength; - } - if (buffersLength < this.byteLength) { - throw new Error("Data size shouldn't be larger than the total length of buffers."); - } - } - /** - * Internal _read() that will be called when the stream wants to pull more data in. - * - * @param size - Optional. The size of data to be read - */ - _read(size) { - if (this.pushedBytesLength >= this.byteLength) { - this.push(null); - } - if (!size) { - size = this.readableHighWaterMark; - } - const outBuffers = []; - let i = 0; - while (i < size && this.pushedBytesLength < this.byteLength) { - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); - if (remaining > size - i) { - const end2 = this.byteOffsetInCurrentBuffer + size - i; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end2)); - this.pushedBytesLength += size - i; - this.byteOffsetInCurrentBuffer = end2; - i = size; - break; - } else { - const end2 = this.byteOffsetInCurrentBuffer + remaining; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end2)); - if (remaining === remainingCapacityInThisBuffer) { - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex++; - } else { - this.byteOffsetInCurrentBuffer = end2; - } - this.pushedBytesLength += remaining; - i += remaining; - } - } - if (outBuffers.length > 1) { - this.push(Buffer.concat(outBuffers)); - } else if (outBuffers.length === 1) { - this.push(outBuffers[0]); - } - } - }; - var maxBufferLength = buffer.constants.MAX_LENGTH; - var PooledBuffer = class { - /** - * The size of the data contained in the pooled buffers. 
- */ - get size() { - return this._size; - } - constructor(capacity, buffers, totalLength) { - this.buffers = []; - this.capacity = capacity; - this._size = 0; - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; - if (len === 0) { - len = maxBufferLength; - } - this.buffers.push(Buffer.allocUnsafe(len)); - } - if (buffers) { - this.fill(buffers, totalLength); - } - } - /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. - * Data copied will be shift out of the input buffers. - * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. - * - */ - fill(buffers, totalLength) { - this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; - while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); - totalCopiedNum += copiedNum; - sourceOffset += copiedNum; - targetOffset += copiedNum; - if (sourceOffset === source.length) { - i++; - sourceOffset = 0; - } - if (targetOffset === target.length) { - j++; - targetOffset = 0; - } - } - buffers.splice(0, i); - if (buffers.length > 0) { - buffers[0] = buffers[0].slice(sourceOffset); - } - } - /** - * Get the readable stream assembled from all the data in the internal buffers. - * - */ - getReadableStream() { - return new BuffersStream(this.buffers, this.size); - } - }; - var BufferScheduler = class { - /** - * Creates an instance of BufferScheduler. - * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream - */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - this.emitter = new events.EventEmitter(); - this.offset = 0; - this.isStreamEnd = false; - this.isError = false; - this.executingOutgoingHandlers = 0; - this.numBuffers = 0; - this.unresolvedDataArray = []; - this.unresolvedLength = 0; - this.incoming = []; - this.outgoing = []; - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; - } - /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. - * - */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data2) => { - data2 = typeof data2 === "string" ? 
Buffer.from(data2, this.encoding) : data2; - this.appendUnresolvedData(data2); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer2 = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer2.getReadableStream(), buffer2.size, this.offset).then(resolve).catch(reject); - } else if (this.unresolvedLength >= this.bufferSize) { - return; - } else { - resolve(); - } - } - }); - }); - } - /** - * Insert a new data into unresolved array. - * - * @param data - - */ - appendUnresolvedData(data2) { - this.unresolvedDataArray.push(data2); - this.unresolvedLength += data2.length; - } - /** - * Try to shift a buffer with size in blockSize. The buffer returned may be less - * than blockSize when data in unresolvedDataArray is less than bufferSize. - * - */ - shiftBufferFromUnresolvedDataArray(buffer2) { - if (!buffer2) { - buffer2 = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); - } else { - buffer2.fill(this.unresolvedDataArray, this.unresolvedLength); - } - this.unresolvedLength -= buffer2.size; - return buffer2; - } - /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. - * - * @returns Return false when buffers in incoming are not enough, else true. - */ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer2; - if (this.incoming.length > 0) { - buffer2 = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer2); - } else { - if (this.numBuffers < this.maxBuffers) { - buffer2 = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } else { - return false; - } - } - this.outgoing.push(buffer2); - this.triggerOutgoingHandlers(); - } - return true; - } - /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. - */ - async triggerOutgoingHandlers() { - let buffer2; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer2 = this.outgoing.shift(); - if (buffer2) { - this.triggerOutgoingHandler(buffer2); - } - } while (buffer2); - } - /** - * Trigger a outgoing handler for a buffer shifted from outgoing. - * - * @param buffer - - */ - async triggerOutgoingHandler(buffer2) { - const bufferLength = buffer2.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; - try { - await this.outgoingHandler(() => buffer2.getReadableStream(), bufferLength, this.offset - bufferLength); - } catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer2); - this.emitter.emit("checkEnd"); - } - /** - * Return buffer used by outgoing handler into incoming. 
- * - * @param buffer - - */ - reuseBuffer(buffer2) { - this.incoming.push(buffer2); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); - } - } - }; - async function streamToBuffer(stream3, buffer2, offset, end2, encoding) { + exports2.Batch = Batch; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js +var require_utils4 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.fsCreateReadStream = exports2.fsStat = void 0; + exports2.streamToBuffer = streamToBuffer; + exports2.streamToBuffer2 = streamToBuffer2; + exports2.streamToBuffer3 = streamToBuffer3; + exports2.readStreamToLocalFile = readStreamToLocalFile; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_fs_1 = tslib_1.__importDefault(require("node:fs")); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var constants_js_1 = require_constants9(); + async function streamToBuffer(stream, buffer, offset, end2, encoding) { let pos = 0; const count = end2 - offset; return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream3.on("readable", () => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); + stream.on("readable", () => { if (pos >= count) { clearTimeout(timeout); resolve(); return; } - let chunk = stream3.read(); + let chunk = stream.read(); if (!chunk) { return; } @@ -52754,28 +55931,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; chunk = Buffer.from(chunk, encoding); } const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer2.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); - stream3.on("end", () => { + stream.on("end", () => { clearTimeout(timeout); if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } resolve(); }); - stream3.on("error", (msg) => { + stream.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } - async function streamToBuffer2(stream3, buffer2, encoding) { + async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; - const bufferSize = buffer2.length; + const bufferSize = buffer.length; return new Promise((resolve, reject) => { - stream3.on("readable", () => { - let chunk = stream3.read(); + stream.on("readable", () => { + let chunk = stream.read(); if (!chunk) { return; } @@ -52786,18 +55963,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); return; } - buffer2.fill(chunk, pos, pos + chunk.length); + buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); - stream3.on("end", () => { + stream.on("end", () => { resolve(pos); }); - stream3.on("error", reject); + stream.on("error", reject); + }); + } + async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data2) => { + chunks.push(typeof data2 === "string" ? 
Buffer.from(data2, encoding) : data2); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); }); } async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); + const ws = node_fs_1.default.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -52808,9 +55997,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; rs.pipe(ws); }); } - var fsStat = util__namespace.promisify(fs__namespace.stat); - var fsCreateReadStream = fs__namespace.createReadStream; - var BlobClient = class _BlobClient extends StorageClient { + exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); + exports2.fsCreateReadStream = node_fs_1.default.createReadStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Clients.js +var require_Clients = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_auth_1 = require_commonjs7(); + var core_util_1 = require_commonjs4(); + var core_util_2 = require_commonjs4(); + var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); + var BlobQueryResponse_js_1 = require_BlobQueryResponse(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var models_js_1 = require_models2(); + var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); + var Pipeline_js_1 = require_Pipeline(); + var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); + var Range_js_1 = require_Range(); + var StorageClient_js_1 = require_StorageClient(); + var Batch_js_1 = require_Batch(); + var storage_common_1 = require_commonjs11(); + var constants_js_1 = require_constants9(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var utils_js_1 = require_utils4(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { + /** + * blobContext provided by protocol layer. + */ + blobContext; + _name; + _containerName; + _versionId; + _snapshot; /** * The name of the blob. 
*/ @@ -52826,49 +56054,49 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { options = options || {}; let pipeline; - let url2; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + let url; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; + url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); this.blobContext = this.storageClientContext.blob; - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); + this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. @@ -52877,8 +56105,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - withSnapshot(snapshot2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a new BlobClient object pointing to a version of this blob. @@ -52887,8 +56115,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. */ - withVersion(versionId2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? void 0 : versionId2), this.pipeline); + withVersion(versionId) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); } /** * Creates a AppendBlobClient object. @@ -52918,7 +56146,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * In Node.js, data returns in a Readable stream readableStreamBody * * In browsers, data returns in a promise blobBody * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob * * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. 
Will download to the end when undefined @@ -52927,76 +56155,106 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage (Node.js): * - * ```js - * // Download and convert a blob to a string + * ```ts snippet:ReadmeSampleDownloadBlob_Node + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); - * console.log("Downloaded blob content:", downloaded.toString()); + * if (downloadBlockBlobResponse.readableStreamBody) { + * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); + * console.log(`Downloaded blob content: ${downloaded}`); + * } * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(typeof data === "string" ? Buffer.from(data) : data); + * async function streamToString(stream: NodeJS.ReadableStream): Promise { + * const result = await new Promise>((resolve, reject) => { + * const chunks: Buffer[] = []; + * stream.on("data", (data) => { + * chunks.push(Buffer.isBuffer(data) ? 
data : Buffer.from(data)); * }); - * readableStream.on("end", () => { + * stream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); - * readableStream.on("error", reject); + * stream.on("error", reject); * }); + * return result.toString(); * } * ``` * * Example usage (browser): * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); - * console.log( - * "Downloaded blob content", - * downloaded + * ```ts snippet:ReadmeSampleDownloadBlob_Browser + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), * ); * - * async function blobToString(blob: Blob): Promise { - * const fileReader = new FileReader(); - * return new Promise((resolve, reject) => { - * fileReader.onloadend = (ev: any) => { - * resolve(ev.target!.result); - * }; - * fileReader.onerror = reject; - * fileReader.readAsText(blob); - * }); + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + * const downloadBlockBlobResponse = await blobClient.download(); + * const blobBody = await downloadBlockBlobResponse.blobBody; + * if (blobBody) { + * const downloaded = await blobBody.text(); + * console.log(`Downloaded blob content: ${downloaded}`); * } * ``` */ async download(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { - var _a4; - const res = assertResponse(await this.blobContext.download({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { - onDownloadProgress: coreUtil.isNode ? void 0 : options.onProgress + onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress // for Node.js, progress is reported by RetriableReadableStream }, - range: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), + range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - if (!coreUtil.isNode) { + const wrappedRes = { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; + if (!core_util_1.isNodeLike) { return wrappedRes; } if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; } if (res.contentLength === void 0) { throw new RangeError(`File download response doesn't contain valid content length header`); @@ -53004,8 +56262,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!res.etag) { throw new RangeError(`File download response doesn't contain valid etag header`); } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a5; + return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { const updatedDownloadOptions = { leaseAccessConditions: options.conditions, modifiedAccessConditions: { @@ -53013,9 +56270,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a5 = options.conditions) === null || _a5 === void 0 ? void 0 : _a5.tagConditions + ifTags: options.conditions?.tagConditions }, - range: rangeToString({ + range: (0, Range_js_1.rangeToString)({ count: offset + res.contentLength - start, offset: start }), @@ -53024,7 +56281,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }; - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + return (await this.blobContext.download({ + abortSignal: options.abortSignal, + ...updatedDownloadOptions + })).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress @@ -53041,9 +56301,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - options to Exists operation. 
*/ async exists(options = {}) { - return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); await this.getProperties({ abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, @@ -53054,7 +56314,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } catch (e) { if (e.statusCode === 404) { return false; - } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { return true; } throw e; @@ -53064,7 +56324,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by @@ -53075,17 +56335,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getProperties(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { - var _a4; - const res = assertResponse(await this.blobContext.getProperties({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + return { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; }); } /** @@ -53093,19 +56361,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. 
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async delete(options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.delete({ + return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53115,19 +56385,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { - return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { - var _a4, _b; + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { try { - const res = assertResponse(await this.delete(updatedOptions)); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode) === "BlobNotFound") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -53137,13 +56415,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 * or later. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob * * @param options - Optional options to Blob Undelete operation. 
*/ async undelete(options = {}) { - return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.undelete({ + return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -53154,7 +56432,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * If no value provided, or no value provided for the specified blob HTTP headers, * these blob HTTP headers without a value will be cleared. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * * @param blobHTTPHeaders - If no value provided, or no value provided for * the specified blob HTTP headers, these blob HTTP @@ -53166,14 +56444,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.setHttpHeaders({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. tracingOptions: updatedOptions.tracingOptions })); @@ -53184,22 +56464,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * If no option provided, or no metadata defined in the parameter, the blob * metadata will be removed. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. 
*/ - async setMetadata(metadata2, options = {}) { + async setMetadata(metadata, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.setMetadata({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata: metadata2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions @@ -53215,15 +56497,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param tags - * @param options - */ - async setTags(tags2, options = {}) { - return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.setTags({ + async setTags(tags, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions, - tags: toBlobTags(tags2) + tags: (0, utils_common_js_1.toBlobTags)(tags) })); }); } @@ -53233,15 +56517,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async getTags(options = {}) { - return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { - var _a4; - const response = assertResponse(await this.blobContext.getTags({ + return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? 
void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} + }; return wrappedResponse; }); } @@ -53252,24 +56543,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobLeaseClient object for managing leases on the blob. */ getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); } /** * Creates a read-only snapshot of a blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.createSnapshot({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions @@ -53290,57 +56583,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. 
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob * - * Example using automatic polling: + * ```ts snippet:ClientsBeginCopyFromURL + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using manual polling: + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); - * } - * const result = copyPoller.getResult(); - * ``` + * // Example using automatic polling + * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); + * const automaticResult = await automaticCopyPoller.pollUntilDone(); * - * Example using progress updates: + * // Example using manual polling + * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); + * while (!manualCopyPoller.isDone()) { + * await manualCopyPoller.poll(); + * } + * const manualResult = manualCopyPoller.getResult(); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { + * // Example using progress updates + * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { * onProgress(state) { * console.log(`Progress: ${state.copyProgress}`); - * } + * }, * }); - * const result = await copyPoller.pollUntilDone(); - * ``` + * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); * - * Example using a changing polling interval (default 15 seconds): - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress + * // Example using a changing polling interval (default 15 seconds) + * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { + * intervalInMs: 1000, // poll blob every 1 second for copy progress * }); - * const result = await copyPoller.pollUntilDone(); - * ``` + * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); * - * Example using copy cancellation: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // Example using copy cancellation: + * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); * // cancel operation after starting it. * try { - * await copyPoller.cancelOperation(); + * await cancelCopyPoller.cancelOperation(); * // calls to get the result now throw PollerCancelledError - * await copyPoller.getResult(); - * } catch (err) { - * if (err.name === 'PollerCancelledError') { - * console.log('The copy was cancelled.'); + * cancelCopyPoller.getResult(); + * } catch (err: any) { + * if (err.name === "PollerCancelledError") { + * console.log("The copy was cancelled."); * } * } * ``` @@ -53348,15 +56642,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. 
*/ - async beginCopyFromURL(copySource2, options = {}) { + async beginCopyFromURL(copySource, options = {}) { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), startCopyFromURL: (...args) => this.startCopyFromURL(...args) }; - const poller = new BlobBeginCopyFromUrlPoller({ + const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ blobClient: client, - copySource: copySource2, + copySource, intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, @@ -53368,14 +56662,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob * * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. */ - async abortCopyFromURL(copyId2, options = {}) { - return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.abortCopyFromURL(copyId2, { + async abortCopyFromURL(copyId, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions @@ -53385,36 +56679,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url * * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ - async syncCopyFromURL(copySource2, options = {}) { + async syncCopyFromURL(copySource, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { - var _a4, _b, _c, _d, _e, _f, _g; - return assertResponse(await this.blobContext.copyFromURL(copySource2, { + return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? 
void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, sourceContentMD5: options.sourceContentMD5, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), - immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, - immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53425,31 +56722,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * storage only). A premium page blob's tier determines the allowed size, IOPS, * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive * storage type. This operation does not update the blob's ETag. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier * * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ - async setAccessTier(tier2, options = {}) { - return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.blobContext.setTier(toAccessTier(tier2), { + async setAccessTier(tier, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions })); }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { - var _a4; - let buffer2; + let buffer; let offset = 0; let count = 0; let options = param4; if (param1 instanceof Buffer) { - buffer2 = param1; + buffer = param1; offset = param2 || 0; count = typeof param3 === "number" ? param3 : 0; } else { @@ -53457,12 +56755,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; count = typeof param2 === "number" ? 
param2 : 0; options = param3 || {}; } - let blockSize = (_a4 = options.blockSize) !== null && _a4 !== void 0 ? _a4 : 0; + let blockSize = options.blockSize ?? 0; if (blockSize < 0) { throw new RangeError("blockSize option must be >= 0"); } if (blockSize === 0) { - blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } if (offset < 0) { throw new RangeError("offset option must be >= 0"); @@ -53473,26 +56771,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + const response = await this.getProperties({ + ...options, + tracingOptions: updatedOptions.tracingOptions + }); count = response.contentLength - offset; if (count < 0) { throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } } - if (!buffer2) { + if (!buffer) { try { - buffer2 = Buffer.alloc(count); + buffer = Buffer.alloc(count); } catch (error2) { throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error2.message}`); } } - if (buffer2.length < count) { + if (buffer.length < count) { throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); } let transferProgress = 0; - const batch = new Batch(options.concurrency); + const batch = new Batch_js_1.Batch(options.concurrency); for (let off = offset; off < offset + count; off = off + blockSize) { batch.addOperation(async () => { let chunkEnd = offset + count; @@ -53506,8 +56807,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions }); - const stream3 = response.readableStreamBody; - await streamToBuffer(stream3, buffer2, off - offset, chunkEnd - offset); + const stream = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); transferProgress += chunkEnd - off; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); @@ -53515,7 +56816,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }); } await batch.do(); - return buffer2; + return buffer; }); } /** @@ -53535,10 +56836,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * at the specified path. 
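A minimal usage sketch of `downloadToBuffer` as implemented above and the related `downloadToFile` helper documented just after (both Node.js only); this is illustrative and not part of the generated bundle, and the account, container, blob names and tuning values are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "<account>";
const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Download the whole blob into memory, using ranged GETs of 4 MiB
// with up to 5 requests in flight.
const buffer = await blobClient.downloadToBuffer(0, undefined, {
  blockSize: 4 * 1024 * 1024,
  concurrency: 5,
  onProgress: (progress) => console.log(`loaded ${progress.loadedBytes} bytes`),
});
console.log(`downloaded ${buffer.length} bytes`);

// Or stream the blob straight to a local file instead.
await blobClient.downloadToFile("./blob-copy.bin");
```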
*/ async downloadToFile(filePath, offset = 0, count, options = {}) { - return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); + await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); } response.blobDownloadStream = void 0; return response; @@ -53553,7 +56857,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; - } else if (isIpEndpointStyle(parsedUrl)) { + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; @@ -53581,21 +56885,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ - async startCopyFromURL(copySource2, options = {}) { - return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { - var _a4, _b, _c; + async startCopyFromURL(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - return assertResponse(await this.blobContext.startCopyFromURL(copySource2, { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, @@ -53603,12 +56909,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions }, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), sealBlob: options.sealBlob, tracingOptions: updatedOptions.tracingOptions })); @@ -53620,18 +56926,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -53640,24 +56952,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
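A small sketch of how the shared-key SAS generation above (`generateSasUrl`) is typically driven from calling code; the account name, key, and expiry are placeholder values, and `BlobSASPermissions` is imported from the same package:

```ts
import {
  BlobServiceClient,
  StorageSharedKeyCredential,
  BlobSASPermissions,
} from "@azure/storage-blob";

const account = "<account>";
// generateSasUrl only works when the client was built with a shared key credential.
const credential = new StorageSharedKeyCredential(account, "<account-key>");

const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  credential,
)
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Read-only SAS URL for this blob, valid for one hour.
const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),
  expiresOn: new Date(Date.now() + 60 * 60 * 1000),
});
console.log(sasUrl);
```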
*/ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).stringToSign; } /** * * Generates a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` @@ -53665,8 +56983,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -53675,14 +56999,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** * Delete the immutablility policy on the blob. 
@@ -53690,8 +57020,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to delete immutability policy on the blob. */ async deleteImmutabilityPolicy(options = {}) { - return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ tracingOptions: updatedOptions.tracingOptions })); }); @@ -53702,8 +57032,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to set immutability policy on the blob. */ async setImmutabilityPolicy(immutabilityPolicy, options = {}) { - return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.setImmutabilityPolicy({ + return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, tracingOptions: updatedOptions.tracingOptions @@ -53716,8 +57046,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to set legal hold on the blob. */ async setLegalHold(legalHoldEnabled, options = {}) { - return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { tracingOptions: updatedOptions.tracingOptions })); }); @@ -53727,60 +57057,65 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.BlobClient = BlobClient; var AppendBlobClient = class _AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. 
+ */ + appendBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.appendBlobContext = this.storageClientContext.appendBlob; } /** @@ -53791,8 +57126,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. @@ -53803,28 +57138,43 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const appendBlobClient = containerClient.getAppendBlobClient(""); + * ```ts snippet:ClientsCreateAppendBlob + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * const appendBlobClient = containerClient.getAppendBlobClient(blobName); * await appendBlobClient.create(); * ``` */ async create(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { - var _a4, _b, _c; - return assertResponse(await this.appendBlobContext.create(0, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - blobTagsString: toBlobTagsString(options.tags), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -53837,15 +57187,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async createIfNotExists(options = {}) { - const conditions = { ifNoneMatch: ETagAny }; - return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { - var _a4, _b; + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const res = (0, utils_common_js_1.assertResponse)(await this.create({ + ...updatedOptions, + conditions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode) === "BlobAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -53858,13 +57219,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async seal(options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.appendBlobContext.seal({ + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53880,29 +57243,44 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:ClientsAppendBlock + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * * const content = "Hello World!"; * * // Create a new append blob and append data to the blob. 
- * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); * await newAppendBlobClient.create(); * await newAppendBlobClient.appendBlock(content, content.length); * * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ - async appendBlock(body2, contentLength2, options = {}) { + async appendBlock(body, contentLength, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.appendBlobContext.appendBlock(contentLength2, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, @@ -53917,7 +57295,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url * * @param sourceURL - * The url to the blob that will be the source of the copy. 
A source blob in the same storage account can @@ -53931,74 +57309,89 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { - var _a4, _b, _c, _d, _e; - return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { abortSignal: options.abortSignal, - sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.AppendBlobClient = AppendBlobClient; var BlockBlobClient = class _BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + _blobContext; + /** + * blockBlobContext provided by protocol layer. 
+ */ + blockBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; + url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.blockBlobContext = this.storageClientContext.blockBlob; this._blobContext = this.storageClientContext.blob; } @@ -54010,8 +57403,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -54020,17 +57413,34 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage (Node.js): * - * ```js + * ```ts snippet:ClientsQuery + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); + * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); + * if (queryBlockBlobResponse.readableStreamBody) { + * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); + * const downloaded = downloadedBuffer.toString(); + * console.log(`Query blob content: ${downloaded}`); + * } * - * async function streamToBuffer(readableStream) { + * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { * return new Promise((resolve, reject) => { - * const chunks = []; + * const chunks: Buffer[] = []; * readableStream.on("data", (data) => { - * chunks.push(typeof data === "string" ? Buffer.from(data) : data); + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); @@ -54044,26 +57454,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async query(query, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - if (!coreUtil.isNode) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + if (!core_util_1.isNodeLike) { throw new Error("This operation currently is only supported in Node.js."); } - return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { - var _a4; - const response = assertResponse(await this._blobContext.query({ + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), + outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) }, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - return new BlobQueryResponse(response, { + return new BlobQueryResponse_js_1.BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError @@ -54092,32 +57504,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async upload(body2, contentLength2, options = {}) { + async upload(body, contentLength, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { - var _a4, _b, _c; - return assertResponse(await this.blockBlobContext.upload(contentLength2, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: 
options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54142,16 +57570,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async syncUploadFromURL(sourceURL, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { - var _a4, _b, _c, _d, _e, _f; - return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, - sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { + ...options, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** @@ -54165,10 +57608,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. */ - async stageBlock(blockId2, body2, contentLength2, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { - return assertResponse(await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, { + async stageBlock(blockId, body, contentLength, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { @@ -54186,7 +57629,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. * This API is available starting in version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url * * @param blockId - A 64-byte value that is base64-encoded * @param sourceURL - Specifies the URL of the blob. The value @@ -54203,18 +57646,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. 
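A brief sketch of the staged-block upload flow around `stageBlock` and `commitBlockList` on `BlockBlobClient`; the block IDs and contents are illustrative only, and block IDs must be base64 strings of equal length:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const blockBlobClient = new BlobServiceClient(
  `https://<account>.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("<container>")
  .getBlockBlobClient("<blob>");

// Same-length source strings give same-length base64 block IDs.
const blockIds = ["block-000", "block-001"].map((id) =>
  Buffer.from(id).toString("base64"),
);

// Stage each block; contentLength must match the body.
await blockBlobClient.stageBlock(blockIds[0], "hello ", 6);
await blockBlobClient.stageBlock(blockIds[1], "world", 5);

// Committing the list makes the staged blocks the blob's content.
await blockBlobClient.commitBlockList(blockIds);
```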
*/ - async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { - return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, { + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, - sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), + sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54231,24 +57675,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ - async commitBlockList(blocks2, options = {}) { + async commitBlockList(blocks, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { - var _a4, _b, _c; - return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks2 }, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54263,13 +57709,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation. */ - async getBlockList(listType2, options = {}) { - return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { - var _a4; - const res = assertResponse(await this.blockBlobContext.getBlockList(listType2, { + async getBlockList(listType, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); if (!res.committedBlocks) { @@ -54298,18 +57746,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async uploadData(data2, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { - if (coreUtil.isNode) { - let buffer2; + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (core_util_1.isNodeLike) { + let buffer; if (data2 instanceof Buffer) { - buffer2 = data2; + buffer = data2; } else if (data2 instanceof ArrayBuffer) { - buffer2 = Buffer.from(data2); + buffer = Buffer.from(data2); } else { data2 = data2; - buffer2 = Buffer.from(data2.buffer, data2.byteOffset, data2.byteLength); + buffer = Buffer.from(data2.buffer, data2.byteOffset, data2.byteLength); } - return this.uploadSeekableInternal((offset, size) => buffer2.slice(offset, offset + size), buffer2.byteLength, updatedOptions); + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } else { const browserBlob = new Blob([data2]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); @@ -54336,7 +57784,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. 
*/ async uploadBrowserData(browserData, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { const browserBlob = new Blob([browserData]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); }); @@ -54357,23 +57805,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { - var _a4, _b; - let blockSize = (_a4 = options.blockSize) !== null && _a4 !== void 0 ? _a4 : 0; - if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + let blockSize = options.blockSize ?? 0; + if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + const maxSingleShotSize = options.maxSingleShotSize ?? constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } if (blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`${size} is too larger to upload to a block blob.`); } if (size > maxSingleShotSize) { - blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } } @@ -54383,32 +57830,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { if (size <= maxSingleShotSize) { - return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); + return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); } const numBlocks = Math.floor((size - 1) / blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= 
${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); } const blockList = []; - const blockIDPrefix = coreUtil.randomUUID(); + const blockIDPrefix = (0, core_util_2.randomUUID)(); let transferProgress = 0; - const batch = new Batch(options.concurrency); + const batch = new Batch_js_1.Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); const start = blockSize * i; const end2 = i === numBlocks - 1 ? size : start + blockSize; - const contentLength2 = end2 - start; + const contentLength = end2 - start; blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, { + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions }); - transferProgress += contentLength2; + transferProgress += contentLength; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress @@ -54434,15 +57881,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. */ async uploadFile(filePath, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { - const size = (await fsStat(filePath)).size; + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await (0, utils_js_1.fsStat)(filePath)).size; return this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { + return () => (0, utils_js_1.fsCreateReadStream)(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, start: offset }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }, size, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); }); } /** @@ -54461,27 +57911,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. 
*/ - async uploadStream(stream3, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; - const blockIDPrefix = coreUtil.randomUUID(); + const blockIDPrefix = (0, core_util_2.randomUUID)(); let transferProgress = 0; const blockList = []; - const scheduler = new BufferScheduler( - stream3, + const scheduler = new storage_common_1.BufferScheduler( + stream, bufferSize, maxConcurrency, - async (body2, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); + async (body, length) => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); blockList.push(blockID); blockNum++; - await this.stageBlock(blockID, body2, length, { + await this.stageBlock(blockID, body, length, { customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, encryptionScope: options.encryptionScope, @@ -54499,50 +57949,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; Math.ceil(maxConcurrency / 4 * 3) ); await scheduler.do(); - return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { + ...options, + tracingOptions: updatedOptions.tracingOptions + })); }); } }; + exports2.BlockBlobClient = BlockBlobClient; var PageBlobClient = class _PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. 
+ */ + pageBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.pageBlobContext = this.storageClientContext.pageBlob; } /** @@ -54553,8 +58011,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a page blob of the specified length. Call uploadPages to upload data @@ -54567,23 +58025,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async create(size, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { - var _a4, _b, _c; - return assertResponse(await this.pageBlobContext.create(0, size, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54598,15 +58058,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async createIfNotExists(size, options = {}) { - return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { - var _a4, _b; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - const conditions = { ifNoneMatch: ETagAny }; - const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { + ...options, + conditions, + tracingOptions: updatedOptions.tracingOptions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode) === "BlobAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -54622,19 +58094,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ - async uploadPages(body2, offset, count, options = {}) { + async uploadPages(body, offset, count, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.uploadPages(count, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? 
void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, - range: rangeToString({ offset, count }), + range: (0, Range_js_1.rangeToString)({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, @@ -54647,7 +58121,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url * * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob @@ -54658,25 +58132,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { - var _a4, _b, _c, _d, _e; - return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54692,13 +58169,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.clearPages(0, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, @@ -54717,16 +58196,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { - var _a4; - const response = assertResponse(await this.pageBlobContext.getPageRanges({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(response); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** @@ -54741,15 +58222,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to PageBlob Get Page Ranges Segment operation. 
*/ - async listPageRangesSegment(offset = 0, count, marker2, options = {}) { - return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.getPageRanges({ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), - range: rangeToString({ offset, count }), - marker: marker2, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); @@ -54769,17 +58252,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to List Page Ranges operation. */ - listPageRangeItemSegments() { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker2, options = {}) { - let getPageRangeItemSegmentsResponse; - if (!!marker2 || marker2 === void 0) { - do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); - } while (marker2); - } - }); + async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects @@ -54788,27 +58269,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. 
*/ - listPageRangeItems() { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { - var _a4, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const getPageRangesSegment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *listPageRangeItems(offset = 0, count, options = {}) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } } /** * Returns an async iterable iterator to list of page ranges for a page blob. @@ -54816,66 +58281,67 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * - * Example using `for await` syntax: + * ```ts snippet:ClientsListPageBlobs + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); + * + * // Example using `for await` syntax * let i = 1; * for await (const pageRange of pageBlobClient.listPageRanges()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * ``` * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * let iter = pageBlobClient.listPageRanges(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRanges(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } - * ``` * - * Example using paging with a marker: - * - * ```js - * let i = 1; + * // Example using paging with a 
marker + * i = 1; * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } * ``` + * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. @@ -54901,7 +58367,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); } }; } @@ -54917,17 +58386,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { - var _a4; - const result2 = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + const result2 = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, prevsnapshot: prevSnapshot, - range: rangeToString({ offset, count }), + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(result2); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result2); }); } /** @@ -54944,20 +58415,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options = {}) { - return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, - leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a4 = options === null || options === void 0 ? void 0 : options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options?.abortSignal, + leaseAccessConditions: options?.conditions, + modifiedAccessConditions: { + ...options?.conditions, + ifTags: options?.conditions?.tagConditions + }, prevsnapshot: prevSnapshotOrUrl, - range: rangeToString({ + range: (0, Range_js_1.rangeToString)({ offset, count }), - marker: marker2, - maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + marker, + maxPageSize: options?.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54978,17 +58451,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { - let getPageRangeItemSegmentsResponse; - if (!!marker2 || marker2 === void 0) { - do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); - } while (marker2); - } - }); + async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects @@ -54998,27 +58469,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var _a4, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const getPageRangesSegment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; - } - } - }); + async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } } /** * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. @@ -55026,66 +58481,76 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. * - * Example using `for await` syntax: + * ```ts snippet:ClientsListPageBlobsDiff + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using `iter.next()`: + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * - * ```js + * const offset = 0; + * const count = 1024; + * const previousSnapshot = ""; + * // Example using `for await` syntax * let i = 1; - * let iter = pageBlobClient.listPageRangesDiff(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * ``` * - * Example using `byPage()`: + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); + * } * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of 
pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } - * ``` * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * // Example using paging with a marker + * i = 1; + * let iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } * ``` + * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. @@ -55094,7 +58559,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ listPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { + ...options + }); return { /** * The next method, part of the iteration protocol @@ -55112,7 +58579,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); } }; } @@ -55126,19 +58596,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { - var _a4; - const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), - prevSnapshotUrl: prevSnapshotUrl2, - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + prevSnapshotUrl, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(response); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** @@ -55151,12 +58623,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async resize(size, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.resize(size, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); @@ -55164,22 +58638,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } /** * Sets a page blob's sequence number. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. * @param sequenceNumber - Required if sequenceNumberAction is max or update * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. 
*/ - async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -55190,37 +58666,71 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * copied snapshot are transferred to the destination. * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://learn.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots * * @param copySource - Specifies the name of the source page blob snapshot. For example, * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. */ - async startCopyIncremental(copySource2, options = {}) { - return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { - var _a4; - return assertResponse(await this.pageBlobContext.copyIncremental(copySource2, { + async startCopyIncremental(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a4 = options.conditions) === null || _a4 === void 0 ? 
void 0 : _a4.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.PageBlobClient = PageBlobClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js +var require_BatchUtils = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getBodyAsText = getBodyAsText; + exports2.utf8ByteLength = utf8ByteLength; + var utils_js_1 = require_utils4(); + var constants_js_1 = require_constants9(); async function getBodyAsText(batchResponse) { - let buffer2 = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer2); - buffer2 = buffer2.slice(0, responseLength); - return buffer2.toString(); + let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); } function utf8ByteLength(str) { return Buffer.byteLength(str); } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js +var require_BatchResponseParser = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BatchResponseParser = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_http_compat_1 = require_commonjs9(); + var constants_js_1 = require_constants9(); + var BatchUtils_js_1 = require_BatchUtils(); + var log_js_1 = require_log5(); var HTTP_HEADER_DELIMITER = ": "; var SPACE_DELIMITER = " "; var NOT_FOUND = -1; var BatchResponseParser = class { + batchResponse; + responseBatchBoundary; + perResponsePrefix; + batchResponseEnding; + subRequests; constructor(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) { throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); @@ -55231,15 +58741,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; this.batchResponseEnding = `--${this.responseBatchBoundary}--`; } - // For example of response, please refer to https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response async parseBatchResponse() { - if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); } - const responseBodyAsText = await getBodyAsText(this.batchResponse); + const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); const subResponses = 
responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); const subResponseCount = subResponses.length; if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { @@ -55251,18 +58761,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; for (let index2 = 0; index2 < subResponseCount; index2++) { const subResponse = subResponses[index2]; const deserializedSubResponse = {}; - deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); + const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; let subRespFailed = false; let contentId = NOT_FOUND; for (const responseLine of responseLines) { if (!subRespHeaderStartFound) { - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); } - if (responseLine.startsWith(HTTP_VERSION_1_1)) { + if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { subRespHeaderStartFound = true; const tokens = responseLine.split(SPACE_DELIMITER); deserializedSubResponse.status = parseInt(tokens[1]); @@ -55282,7 +58792,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } const tokens = responseLine.split(HTTP_HEADER_DELIMITER); deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { deserializedSubResponse.errorCode = tokens[1]; subRespFailed = true; } @@ -55297,7 +58807,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; deserializedSubResponse._request = this.subRequests.get(contentId); deserializedSubResponses[contentId] = deserializedSubResponse; } else { - logger.error(`subResponses[${index2}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + log_js_1.logger.error(`subResponses[${index2}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); } if (subRespFailed) { subResponsesFailedCount++; @@ -55312,6 +58822,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; } }; + exports2.BatchResponseParser = BatchResponseParser; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js +var require_Mutex = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Mutex = void 0; var MutexLockStatus; (function(MutexLockStatus2) { MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; @@ -55351,6 +58871,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; resolve(); }); } + static keys = {}; + static listeners = {}; static onUnlockEvent(key, handler) { if (this.listeners[key] === void 0) { this.listeners[key] = [handler]; @@ -55367,11 +58889,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - Mutex.keys = {}; - Mutex.listeners = {}; + exports2.Mutex = Mutex; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js +var require_BlobBatch = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatch = void 0; + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_2 = require_commonjs4(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var Clients_js_1 = require_Clients(); + var Mutex_js_1 = require_Mutex(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var core_xml_1 = require_commonjs10(); + var constants_js_1 = require_constants9(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var tracing_js_1 = require_tracing(); + var core_client_1 = require_commonjs8(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); var BlobBatch = class { + batchRequest; + batch = "batch"; + batchType; constructor() { - this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } /** @@ -55395,13 +58942,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.batchRequest.getSubRequests(); } async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); + await Mutex_js_1.Mutex.lock(this.batch); try { this.batchRequest.preAddSubRequest(subRequest); await assembleSubRequestFunc(); this.batchRequest.postAddSubRequest(subRequest); } finally { - await Mutex.unlock(this.batch); + await Mutex_js_1.Mutex.unlock(this.batch); } } setBatchType(batchType) { @@ -55413,13 +58960,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url2; + let url; let credential; - if (typeof urlOrBlobClient === "string" && (coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrOptions))) { - url2 = urlOrBlobClient; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { + url = urlOrBlobClient; credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; options = credentialOrOptions; } else { @@ -55428,28 +58975,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options) { options = {}; } - return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ - url: url2, + url, credential }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url2; + let url; let credential; - let tier2; - if (typeof urlOrBlobClient === "string" && 
(coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrTier))) { - url2 = urlOrBlobClient; + let tier; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { + url = urlOrBlobClient; credential = credentialOrTier; - tier2 = tierOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; + tier = tierOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; - tier2 = credentialOrTier; + tier = credentialOrTier; options = tierOrOptions; } else { throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); @@ -55457,24 +59004,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options) { options = {}; } - return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ - url: url2, + url, credential }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); }); } }; + exports2.BlobBatch = BlobBatch; var InnerBatchRequest = class { + operationCount; + body; + subRequests; + boundary; + subRequestPrefix; + multipartContentType; + batchRequestEnding; constructor() { this.operationCount = 0; this.body = ""; - const tempGuid = coreUtil.randomUUID(); + const tempGuid = (0, core_util_1.randomUUID)(); this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; this.batchRequestEnding = `--${this.boundary}--`; this.subRequests = /* @__PURE__ */ new Map(); @@ -55487,9 +59042,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
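A pattern visible throughout this hunk and the rest of the rebundled output: helpers from required modules are now invoked as `(0, core_auth_1.isTokenCredential)(credentialOrTier)` rather than as plain method calls on a shared namespace. The comma expression evaluates to the bare function before the call, so the module namespace is not passed as `this`, matching how the original ESM code calls imported functions. A minimal sketch of the difference; the `whoAmI` helper is hypothetical and not part of the diff:

```ts
export {};

// Hypothetical helper, used only to show why bundlers emit `(0, ns.fn)(...)`.
function whoAmI(this: unknown): string {
  // In strict-mode/module code, a plain function call leaves `this` undefined.
  return this === undefined ? "plain call: this is undefined" : "method call: this is the receiver";
}

const ns = { whoAmI };

console.log(ns.whoAmI());      // method call: `this` is bound to `ns`
console.log((0, ns.whoAmI)()); // indirect call: the comma operator yields the bare function first
```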
*/ createPipeline(credential) { - const corePipeline = coreRestPipeline.createEmptyPipeline(); - corePipeline.addPolicy(coreClient.serializationPolicy({ - stringifyXML: coreXml.stringifyXML, + const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); + corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ + stringifyXML: core_xml_1.stringifyXML, serializerOptions: { xml: { xmlCharKey: "#" @@ -55498,19 +59053,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }), { phase: "Serialize" }); corePipeline.addPolicy(batchHeaderFilterPolicy()); corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); - if (coreAuth.isTokenCredential(credential)) { - corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ credential, - scopes: StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } + scopes: constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential) { - corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); } - const pipeline = new Pipeline([]); + const pipeline = new Pipeline_js_1.Pipeline([]); pipeline._credential = credential; pipeline._corePipeline = corePipeline; return pipeline; @@ -55519,23 +59074,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.body += [ this.subRequestPrefix, // sub request constant prefix - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID "", // empty line after sub request's content ID - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` + `${request.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` // sub request start line with method - ].join(HTTP_LINE_ENDING); + ].join(constants_js_1.HTTP_LINE_ENDING); for (const [name, value] of request.headers) { - this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; + this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; } - this.body += HTTP_LINE_ENDING; + this.body += constants_js_1.HTTP_LINE_ENDING; } preAddSubRequest(subRequest) { - if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path6 = getURLPath(subRequest.url); + const path6 = (0, utils_common_js_1.getURLPath)(subRequest.url); if (!path6 || path6 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } @@ -55546,7 +59101,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } // Return the http request body with assembling the ending line to the sub 
request body. getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; } getMultipartContentType() { return this.multipartContentType; @@ -55563,7 +59118,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return { request, status: 200, - headers: coreRestPipeline.createHttpHeaders() + headers: (0, core_rest_pipeline_1.createHttpHeaders)() }; } }; @@ -55574,7 +59129,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async sendRequest(request, next2) { let xMsHeaderName = ""; for (const [name] of request.headers) { - if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { xMsHeaderName = name; } } @@ -55585,18 +59140,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js +var require_BlobBatchClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatchClient = void 0; + var BatchResponseParser_js_1 = require_BatchResponseParser(); + var BatchUtils_js_1 = require_BatchUtils(); + var BlobBatch_js_1 = require_BlobBatch(); + var tracing_js_1 = require_tracing(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); var BlobBatchClient = class { - constructor(url2, credentialOrPipeline, options) { + serviceOrContainerContext; + constructor(url, credentialOrPipeline, options) { let pipeline; - if (isPipelineLike(credentialOrPipeline)) { + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { pipeline = credentialOrPipeline; } else if (!credentialOrPipeline) { - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { - pipeline = newPipeline(credentialOrPipeline, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } - const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path6 = getURLPath(url2); + const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + const path6 = (0, utils_common_js_1.getURLPath)(url); if (path6 && path6 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { @@ -55608,10 +59181,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * A BlobBatch represents an aggregated set of operations on blobs. 
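`InnerBatchRequest` in the hunk above frames each sub-operation as an embedded HTTP request inside a `multipart/mixed` body: a boundary line, `Content-Type: application/http`, `Content-Transfer-Encoding: binary`, a `Content-ID`, a blank line, and then the request line and headers, all joined with `HTTP_LINE_ENDING`; `getHttpRequestBody` then appends the closing `--boundary--` marker. A rough, simplified sketch of that framing follows; the boundary value, the CRLF line ending, and the `frameSubRequest` helper are illustrative assumptions, not code from the diff:

```ts
export {};

// Assumed constants for illustration; the real values come from the library's constants module.
const HTTP_LINE_ENDING = "\r\n";
const boundary = "batch_00000000-0000-0000-0000-000000000000";

// Hypothetical helper mirroring the shape of InnerBatchRequest.buildSubRequest.
function frameSubRequest(
  contentId: number,
  method: string,
  pathAndQuery: string,
  headers: Record<string, string>,
): string {
  return [
    `--${boundary}`,
    "Content-Type: application/http",
    "Content-Transfer-Encoding: binary",
    `Content-ID: ${contentId}`,
    "", // blank line before the embedded HTTP request
    `${method} ${pathAndQuery} HTTP/1.1`,
    ...Object.entries(headers).map(([name, value]) => `${name}: ${value}`),
    "", // blank line terminating the embedded headers
  ].join(HTTP_LINE_ENDING);
}

console.log(frameSubRequest(0, "DELETE", "/mycontainer/myblob", { "x-ms-date": new Date().toUTCString() }));
```

On the response side, `BatchResponseParser` (earlier in this diff) splits the reply back apart on the same boundary and parses each sub-response's status line and headers.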
*/ createBatch() { - return new BlobBatch(); + return new BlobBatch_js_1.BlobBatch(); } async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); + const batch = new BlobBatch_js_1.BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); @@ -55622,7 +59195,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.submitBatch(batch); } async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); + const batch = new BlobBatch_js_1.BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); @@ -55640,11 +59213,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob(urlInString0, credential0); - * await batchRequest.deleteBlob(urlInString1, credential1, { - * deleteSnapshots: "include" + * ```ts snippet:BlobBatchClientSubmitBatch + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob("", credential); + * await batchRequest.deleteBlob("", credential, { + * deleteSnapshots: "include", * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); @@ -55652,17 +59239,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example using a lease: * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); - * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { - * conditions: { leaseId: leaseId } + * ```ts snippet:BlobBatchClientSubmitBatchWithLease + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * const blobClient = containerClient.getBlobClient(""); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blobClient, "Cool"); + * await batchRequest.setBlobAccessTier(blobClient, "Cool", { + * conditions: { leaseId: "" }, * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @param batchRequest - A set of Delete or 
SetTier operations. * @param options - @@ -55671,10 +59273,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } - return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { + ...updatedOptions + })); + const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); const res = { _response: rawBatchResponse._response, @@ -55691,7 +59295,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }); } }; - var ContainerClient = class extends StorageClient { + exports2.BlobBatchClient = BlobBatchClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js +var require_ContainerClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var Pipeline_js_1 = require_Pipeline(); + var StorageClient_js_1 = require_StorageClient(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var Clients_js_1 = require_Clients(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var ContainerClient = class extends StorageClient_js_1.StorageClient { + /** + * containerContext provided by protocol layer. + */ + containerContext; + _containerName; /** * The name of the container. 
*/ @@ -55700,48 +59332,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName parameter"); } - super(url2, pipeline); + super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * @see https://learn.microsoft.com/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - Options to Container Create operation. @@ -55749,34 +59381,52 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); + * ```ts snippet:ContainerClientCreate + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); * const createContainerResponse = await containerClient.create(); * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ async create(options = {}) { - return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.create(updatedOptions)); + return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); }); } /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * @see https://learn.microsoft.com/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - */ async createIfNotExists(options = {}) { - return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { - var _a4, _b; + return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode) === "ContainerAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "ContainerAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } else { throw e; } @@ -55793,7 +59443,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async exists(options = {}) { - return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { try { await this.getProperties({ abortSignal: options.abortSignal, @@ -55815,7 +59465,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobClient object for the given blob name. */ getBlobClient(blobName) { - return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates an {@link AppendBlobClient} @@ -55823,7 +59473,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param blobName - An append blob name */ getAppendBlobClient(blobName) { - return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates a {@link BlockBlobClient} @@ -55833,15 +59483,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const content = "Hello world!"; + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ getBlockBlobClient(blobName) { - return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates a {@link PageBlobClient} @@ -55849,12 +59511,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param blobName - A page blob name */ getPageBlobClient(blobName) { - return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. 
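Another bundle-wide change is visible in these hunks: the old output carried TypeScript downlevel emit (`Object.assign(Object.assign({ succeeded: true }, res), …)`, `(_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode`, and statics assigned after the class body), while the rebundled code uses object spread, optional chaining, and class fields directly, so it now assumes a runtime with native support for those features. For the shapes that appear here the two forms behave the same; a small sketch of the equivalence, where the `StorageError` interface is an assumption for illustration:

```ts
export {};

// Assumed error shape for illustration only.
interface StorageError {
  details?: { errorCode?: string };
  response?: { parsedHeaders?: Record<string, unknown> };
}

const e: StorageError = {
  details: { errorCode: "ContainerAlreadyExists" },
  response: { parsedHeaders: { etag: '"0x1"' } },
};

// Downleveled form, roughly as emitted in the old bundle:
const _a = e.details;
const oldCheck = (_a === null || _a === undefined ? undefined : _a.errorCode) === "ContainerAlreadyExists";
const oldResult = Object.assign(
  Object.assign({ succeeded: false }, e.response ? e.response.parsedHeaders : undefined),
  { _response: e.response },
);

// Native form, as emitted in the new bundle:
const newCheck = e.details?.errorCode === "ContainerAlreadyExists";
const newResult = { succeeded: false, ...e.response?.parsedHeaders, _response: e.response };

console.log(oldCheck === newCheck);                                   // true
console.log(JSON.stringify(oldResult) === JSON.stringify(newResult)); // true
```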
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by @@ -55867,14 +59529,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ + abortSignal: options.abortSignal, + ...options.conditions, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ @@ -55882,8 +59548,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.delete({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, @@ -55894,19 +59560,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { - return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { - var _a4, _b; + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + return { + succeeded: true, + ...res, + _response: res._response + }; } catch (e) { - if (((_a4 = e.details) === null || _a4 === void 0 ? void 0 : _a4.errorCode) === "ContainerNotFound") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "ContainerNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -55918,24 +59591,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * If no option provided, or no metadata defined in the parameter, the container * metadata will be removed. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. */ - async setMetadata(metadata2, options = {}) { + async setMetadata(metadata, options = {}) { if (!options.conditions) { options.conditions = {}; } if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } - return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.setMetadata({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata: metadata2, + metadata, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); @@ -55948,7 +59621,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl * * @param options - Options to Container Get Access Policy operation. */ @@ -55956,8 +59629,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.getAccessPolicy({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions @@ -56006,29 +59679,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. * During this interval, a shared access signature that is associated with the stored access policy will * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl * * @param access - The level of public access to data in the container. 
* @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. */ - async setAccessPolicy(access4, containerAcl2, options = {}) { + async setAccessPolicy(access3, containerAcl, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; - for (const identifier of containerAcl2 || []) { + for (const identifier of containerAcl || []) { acl.push({ accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", + expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" + startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" }, id: identifier.id }); } - return assertResponse(await this.containerContext.setAccessPolicy({ + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, - access: access4, + access: access3, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, @@ -56043,7 +59716,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobLeaseClient object for managing leases on the container. */ getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); } /** * Creates a new block blob, or updates the content of an existing block blob. @@ -56067,10 +59740,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ - async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { - return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response @@ -56082,14 +59755,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param blobName - * @param options - Options to Blob Delete operation. * @returns Block blob deletion response data. 
*/ async deleteBlob(blobName, options = {}) { - return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); @@ -56107,13 +59780,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. */ - async listBlobFlatSegment(marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), { tracingOptions: updatedOptions.tracingOptions }))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); - return blobItem; - }) }) }); + async listBlobFlatSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({ + marker, + ...options, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody) + }, + // _response is made non-enumerable + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }) + } + }; return wrappedResponse; }); } @@ -56128,17 +59821,40 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ - async listBlobHierarchySegment(delimiter2, marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { - var _a4; - const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), { tracingOptions: updatedOptions.tracingOptions }))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); - return blobItem; - }), blobPrefixes: (_a4 = response.segment.blobPrefixes) === null || _a4 === void 0 ? void 0 : _a4.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }) }) }); + async listBlobHierarchySegment(delimiter, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, { + marker, + ...options, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody) + }, + // _response is made non-enumerable + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }), + blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name) + }; + return blobPrefix; + }) + } + }; return wrappedResponse; }); } @@ -56154,44 +59870,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listSegments(marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker2, options = {}) { - let listBlobsFlatSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); - marker2 = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); - } while (marker2); - } - }); + async *listSegments(marker, options = {}) { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield await listBlobsFlatSegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ - listItems() { - return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { - var _a4, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const listBlobsFlatSegmentResponse = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *listItems(options = {}) { + let marker; + for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { + yield* listBlobsFlatSegmentResponse.segment.blobItems; + } } /** * Returns an async iterable iterator to list all the blobs @@ -56199,64 +59897,63 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs in pages. 
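The `listSegments`/`listItems` hunk just above shows the same modernization applied to iteration: the `tslib.__asyncGenerator`, `__asyncValues`, and `__asyncDelegator` downlevel helpers are dropped and the methods become plain `async *` generators. The paging shape is unchanged: fetch a segment, yield it, and keep going while the service returns a continuation token; item iteration then flattens each segment with `yield*`. A minimal sketch of that shape with a hypothetical in-memory page fetcher (none of these names come from the diff):

```ts
export {};

interface Page {
  items: string[];
  continuationToken?: string;
}

// Hypothetical stand-in for a segment call such as listBlobFlatSegment.
async function fetchPage(marker?: string): Promise<Page> {
  const pages: Record<string, Page> = {
    start: { items: ["a", "b"], continuationToken: "page2" },
    page2: { items: ["c"] },
  };
  return pages[marker ?? "start"];
}

// Same shape as the rewritten listSegments: loop while a continuation token comes back.
async function* listSegments(marker?: string): AsyncGenerator<Page> {
  do {
    const page = await fetchPage(marker);
    marker = page.continuationToken;
    yield page;
  } while (marker);
}

// Same shape as the rewritten listItems: flatten each segment's items with `yield*`.
async function* listItems(): AsyncGenerator<string> {
  for await (const page of listSegments()) {
    yield* page.items;
  }
}

(async () => {
  for await (const name of listItems()) {
    console.log(name); // prints a, b, c
  }
})();
```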
* - * Example using `for await` syntax: + * ```ts snippet:ReadmeSampleListBlobs_Multiple + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * // Get the containerClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("");` - * let i = 1; - * for await (const blob of containerClient.listBlobsFlat()) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using `iter.next()`: + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); * - * ```js + * // Example using `for await` syntax * let i = 1; - * let iter = containerClient.listBlobsFlat(); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * const blobs = containerClient.listBlobsFlat(); + * for await (const blob of blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` * - * Example using `byPage()`: + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsFlat(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of response.segment.blobItems) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of page.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Example using paging with a marker + * i = 1; * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` * @@ -56264,41 +59961,44 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns An asyncIterableIterator that supports paging. 
*/ listBlobsFlat(options = {}) { - const include2 = []; + const include = []; if (options.includeCopy) { - include2.push("copy"); + include.push("copy"); } if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSnapshots) { - include2.push("snapshots"); + include.push("snapshots"); } if (options.includeVersions) { - include2.push("versions"); + include.push("versions"); } if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); + include.push("uncommittedblobs"); } if (options.includeTags) { - include2.push("tags"); + include.push("tags"); } if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); + include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); + include.push("immutabilitypolicy"); } if (options.includeLegalHold) { - include2.push("legalhold"); + include.push("legalhold"); } if (options.prefix === "") { options.prefix = void 0; } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; const iter = this.listItems(updatedOptions); return { /** @@ -56317,7 +60017,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...updatedOptions + }); } }; } @@ -56334,17 +60037,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listHierarchySegments(delimiter_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter2, marker2, options = {}) { - let listBlobsHierarchySegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); - marker2 = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); - } while (marker2); - } - }); + async *listHierarchySegments(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield await listBlobsHierarchySegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. @@ -56352,35 +60053,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listItemsByHierarchy(delimiter_1) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter2, options = {}) { - var _a4, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const listBlobsHierarchySegmentResponse = _c; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix2 of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; + async *listItemsByHierarchy(delimiter, options = {}) { + let marker; + for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) { + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield { + kind: "prefix", + ...prefix + }; } } - }); + for (const blob of segment.blobItems) { + yield { kind: "blob", ...blob }; + } + } } /** * Returns an async iterable iterator to list all the blobs by hierarchy. @@ -56388,118 +60076,138 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. * - * Example using `for await` syntax: + * ```ts snippet:ReadmeSampleListBlobsByHierarchy + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * for await (const item of containerClient.listBlobsByHierarchy("/")) { - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax + * let i = 1; + * const blobs = containerClient.listBlobsByHierarchy("/"); + * for await (const blob of blobs) { + * if (blob.kind === "prefix") { + * console.log(`\tBlobPrefix: ${blob.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } - * ``` * - * Example using `iter.next()`: - * - * ```js - * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); - * let entity = await iter.next(); - * while (!entity.done) { - * let item = entity.value; - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsByHierarchy("/"); + * let { value, done } = await iter.next(); + * while (!done) { + * if (value.kind === "prefix") { + * console.log(`\tBlobPrefix: ${value.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}`); + * console.log(`\tBlobItem: name - ${value.name}`); * } - * entity = await iter.next(); + * ({ value, done } = await iter.next()); * } - * ``` * - * Example using `byPage()`: - * - * ```js - * 
console.log("Listing blobs by hierarchy by page"); - * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { - * const segment = response.segment; + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { + * const segment = page.segment; * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } - * for (const blob of response.segment.blobItems) { + * for (const blob of page.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } - * ``` - * - * Example using paging with a max page size: * - * ```js - * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); - * - * let i = 1; - * for await (const response of containerClient - * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) - * .byPage({ maxPageSize: 2 })) { - * console.log(`Page ${i++}`); - * const segment = response.segment; - * - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); * } - * + * } + * if (response.segment.blobItems) { * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .listBlobsByHierarchy("/") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } * ``` * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listBlobsByHierarchy(delimiter2, options = {}) { - if (delimiter2 === "") { + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { throw new RangeError("delimiter should contain one or more characters"); } - const include2 = []; + const include = []; if (options.includeCopy) { - include2.push("copy"); + include.push("copy"); } if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSnapshots) { - include2.push("snapshots"); + include.push("snapshots"); } if (options.includeVersions) { - include2.push("versions"); + include.push("versions"); } if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); + include.push("uncommittedblobs"); } if (options.includeTags) { - include2.push("tags"); + include.push("tags"); } if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); + include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); + include.push("immutabilitypolicy"); } if (options.includeLegalHold) { - include2.push("legalhold"); + include.push("legalhold"); } if (options.prefix === "") { options.prefix = void 0; } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); return { /** * The next method, part of the iteration protocol @@ -56517,7 +60225,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.listHierarchySegments(delimiter, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...updatedOptions + }); } }; } @@ -56538,23 +60249,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.filterBlobs({ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, - marker: marker2, + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a4; - let tagValue = ""; - if (((_a4 = blob.tags) === null || _a4 === void 0 ? 
void 0 : _a4.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; return wrappedResponse; }); } @@ -56574,18 +60289,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker2, options = {}) { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** * Returns an AsyncIterableIterator for blobs. @@ -56596,27 +60309,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. 
*/ - findBlobsByTagsItems(tagFilterSqlExpression_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { - var _a4, e_3, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_3_1) { - e_3 = { error: e_3_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_3) throw e_3.error; - } - } - }); + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } /** * Returns an async iterable iterator to find all blobs with specified tag @@ -56626,53 +60323,54 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example using `for await` syntax: * - * ```js + * ```ts snippet:ReadmeSampleFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax * let i = 1; * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` * - * Example using `iter.next()`: - * - * ```js - * let i = 1; + * // Example using `iter.next()` syntax + * i = 1; * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Example using paging with a marker + * i = 1; * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken @@ -56680,11 +60378,10 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * - * // Prints blob names + * // Prints 10 blob names * if (response.blobs) { * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` @@ -56696,7 +60393,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); + const listSegmentOptions = { + ...options + }; const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** @@ -56715,7 +60414,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -56724,14 +60426,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -56743,7 +60445,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; const parsedUrl = new URL(this.url); if (parsedUrl.hostname.split(".")[1] === "blob") { containerName = parsedUrl.pathname.split("/")[1]; - } else if (isIpEndpointStyle(parsedUrl)) { + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { containerName = parsedUrl.pathname.split("/")[2]; } else { containerName = parsedUrl.pathname.split("/")[1]; @@ -56763,18 +60465,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ generateSasUrl(options) { return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, this.credential).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -56783,23 +60488,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, this.credential).stringToSign; } /** * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` @@ -56807,50 +60515,51 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. 
* - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** * Creates a BlobBatchClient object to conduct batch operations. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this container. */ getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } }; + exports2.ContainerClient = ContainerClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js +var require_AccountSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASPermissions = void 0; var AccountSASPermissions = class _AccountSASPermissions { - constructor() { - this.read = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.add = false; - this.create = false; - this.update = false; - this.process = false; - this.tag = false; - this.filter = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; - } /** * Parse initializes the AccountSASPermissions fields from a string. * @@ -56954,6 +60663,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASPermissions; } + /** + * Permission to read resources and list queues and tables granted. + */ + read = false; + /** + * Permission to write resources granted. + */ + write = false; + /** + * Permission to delete blobs and files granted. + */ + delete = false; + /** + * Permission to delete versions granted. + */ + deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add = false; + /** + * Permission to create blobs and files granted. + */ + create = false; + /** + * Permissions to update messages and table entities granted. + */ + update = false; + /** + * Permission to get and delete messages granted. + */ + process = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Permission to filter blobs. + */ + filter = false; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set AccountSASSignatureValues Permissions field. 
@@ -56961,7 +60722,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Using this method will guarantee the resource types are in * an order accepted by the service. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * */ toString() { @@ -57008,12 +60769,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return permissions.join(""); } }; + exports2.AccountSASPermissions = AccountSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js +var require_AccountSASResourceTypes = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASResourceTypes = void 0; var AccountSASResourceTypes = class _AccountSASResourceTypes { - constructor() { - this.service = false; - this.container = false; - this.object = false; - } /** * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an * Error if it encounters a character that does not correspond to a valid resource type. @@ -57039,10 +60805,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASResourceTypes; } + /** + * Permission to access service level APIs granted. + */ + service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object = false; /** * Converts the given resource types to a string. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * */ toString() { @@ -57059,13 +60837,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return resourceTypes.join(""); } }; + exports2.AccountSASResourceTypes = AccountSASResourceTypes; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js +var require_AccountSASServices = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASServices = void 0; var AccountSASServices = class _AccountSASServices { - constructor() { - this.blob = false; - this.file = false; - this.queue = false; - this.table = false; - } /** * Creates an {@link AccountSASServices} from the specified services string. This method will throw an * Error if it encounters a character that does not correspond to a valid service. @@ -57094,6 +60876,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASServices; } + /** + * Permission to access blob resources granted. + */ + blob = false; + /** + * Permission to access file resources granted. + */ + file = false; + /** + * Permission to access queue resources granted. + */ + queue = false; + /** + * Permission to access table resources granted. + */ + table = false; /** * Converts the given services to a string. 
* @@ -57115,44 +60913,62 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return services.join(""); } }; + exports2.AccountSASServices = AccountSASServices; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js +var require_AccountSASSignatureValues = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; } function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { - const version2 = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version2 < "2020-08-04") { + const version = accountSASSignatureValues.version ? accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version2 < "2019-10-10") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version2 < "2019-10-10") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version2 < "2019-12-12") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version2 < "2019-12-12") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } - if (accountSASSignatureValues.encryptionScope && version2 < "2020-12-06") { + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { throw RangeError("'version' must be >= '2020-12-06' when 
provided 'encryptionScope' in SAS."); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); let stringToSign; - if (version2 >= "2020-12-06") { + if (version >= "2020-12-06") { stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version2, + version, accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", "" // Account SAS requires an additional newline character @@ -57163,22 +60979,50 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; parsedPermissions, parsedServices, parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", - version2, + version, "" // Account SAS requires an additional newline character ].join("\n"); } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { - sasQueryParameters: new SASQueryParameters(version2, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), stringToSign }; } - var BlobServiceClient = class _BlobServiceClient extends StorageClient { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js +var require_BlobServiceClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobServiceClient = void 0; + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var Pipeline_js_1 = require_Pipeline(); + var ContainerClient_js_1 = require_ContainerClient(); + var utils_common_js_1 = require_utils_common(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var utils_common_js_2 = require_utils_common(); + var tracing_js_1 = require_tracing(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var StorageClient_js_1 = require_StorageClient(); + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { + /** + * serviceContext provided by protocol layer. + */ + serviceContext; /** * * Creates an instance of BlobServiceClient from connection string. 
@@ -57193,35 +61037,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ static fromConnectionString(connectionString, options) { options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - const pipeline = newPipeline(sharedKeyCredential, options); + const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); return new _BlobServiceClient(extractedCreds.url, pipeline); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); + const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } - constructor(url2, credentialOrPipeline, options) { + constructor(url, credentialOrPipeline, options) { let pipeline; - if (isPipelineLike(credentialOrPipeline)) { + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { pipeline = credentialOrPipeline; - } else if (coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); + } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } else { - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } - super(url2, pipeline); + super(url, pipeline); this.serviceContext = this.storageClientContext.service; } /** @@ -57232,22 +61076,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:BlobServiceClientGetContainerClient + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ getContainerClient(containerName) { - return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + return new 
ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); } /** - * Create a Blob container. @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container * * @param containerName - Name of the container to create. * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. */ async createContainer(containerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { @@ -57264,7 +61117,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Container deletion response. */ async deleteContainer(containerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); return containerClient.delete(updatedOptions); }); @@ -57278,47 +61131,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ - async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); const containerContext = containerClient["storageClientContext"].container; - const containerUndeleteResponse = assertResponse(await containerContext.restore({ - deletedContainerName: deletedContainerName2, - deletedContainerVersion: deletedContainerVersion2, + const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, tracingOptions: updatedOptions.tracingOptions })); return { containerClient, containerUndeleteResponse }; }); } - /** - * Rename an existing Blob Container. - * - * @param sourceContainerName - The name of the source container. - * @param destinationContainerName - The new name of the container. - * @param options - Options to configure Container Rename operation. - */ - /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ - // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. 
- async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { - var _a4; - const containerClient = this.getContainerClient(destinationContainerName); - const containerContext = containerClient["storageClientContext"].container; - const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a4 = options.sourceCondition) === null || _a4 === void 0 ? void 0 : _a4.leaseId }))); - return { containerClient, containerRenameResponse }; - }); - } /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties * * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. */ async getProperties(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getProperties({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57327,15 +61162,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties * * @param properties - * @param options - Options to the Service Set Properties operation. * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { - return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.setProperties(properties, { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57345,14 +61180,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant * replication is enabled for the storage account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats * * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. 
*/ async getStatistics(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getStatistics({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57363,14 +61198,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57378,7 +61213,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } /** * Returns a list of the containers under the specified account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The @@ -57390,9 +61225,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. */ - async listContainersSegment(marker2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + async listContainersSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ + abortSignal: options.abortSignal, + marker, + ...options, + include: typeof options.include === "string" ? [options.include] : options.include, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** @@ -57413,23 +61254,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.serviceContext.filterBlobs({ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, - marker: marker2, + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a4; - let tagValue = ""; - if (((_a4 = blob.tags) === null || _a4 === void 0 ? void 0 : _a4.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; return wrappedResponse; }); } @@ -57449,18 +61294,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker2, options = {}) { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** * Returns an AsyncIterableIterator for blobs. @@ -57471,27 +61314,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. 
*/ - findBlobsByTagsItems(tagFilterSqlExpression_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { - var _a4, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } /** * Returns an async iterable iterator to find all blobs with specified tag @@ -57499,57 +61326,53 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs in pages. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties * - * Example using `for await` syntax: + * ```ts snippet:BlobServiceClientFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * ```js + * // Use for await to iterate the blobs * let i = 1; * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${container.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` * - * Example using `iter.next()`: - * - * ```js - * let i = 1; + * // Use iter.next() to iterate the blobs + * i = 1; * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } + * // Use byPage() to iterate the blobs + * i = 1; + * for await (const page of blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Use paging with a marker + * i = 1; * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * 
console.log(`Blob ${i++}: ${blob.name}`); * } * } - * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken @@ -57561,7 +61384,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` @@ -57573,7 +61396,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); + const listSegmentOptions = { + ...options + }; const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** @@ -57592,7 +61417,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -57608,45 +61436,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list containers operation. */ - listSegments(marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker2, options = {}) { - let listContainersSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker2 = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker2); - } - }); + async *listSegments(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === void 0) { + do { + listContainersSegmentResponse = await this.listContainersSegment(marker, options); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield await listContainersSegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. 
*/ - listItems() { - return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { - var _a4, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a4 = _f.done, !_a4; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a4 && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; - } - } - }); + async *listItems(options = {}) { + let marker; + for await (const segment of this.listSegments(marker, options)) { + yield* segment.containerItems; + } } /** * Returns an async iterable iterator to list all the containers @@ -57654,45 +61464,41 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the containers in pages. * - * Example using `for await` syntax: + * ```ts snippet:BlobServiceClientListContainers + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * ```js + * // Use for await to iterate the containers * let i = 1; * for await (const container of blobServiceClient.listContainers()) { * console.log(`Container ${i++}: ${container.name}`); * } - * ``` - * - * Example using `iter.next()`: * - * ```js - * let i = 1; + * // Use iter.next() to iterate the containers + * i = 1; * const iter = blobServiceClient.listContainers(); - * let containerItem = await iter.next(); - * while (!containerItem.done) { - * console.log(`Container ${i++}: ${containerItem.value.name}`); - * containerItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Container ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } + * // Use byPage() to iterate the containers + * i = 1; + * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * for (const container of page.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } - * ``` * - * Example using paging with a marker: - * - * ```js - * let i = 1; + * // Use paging with a marker + * i = 1; * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * @@ -57714,7 +61520,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * // Prints 10 container names * if (response.containerItems) { * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); + * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` @@ -57726,17 +61532,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (options.prefix === "") { options.prefix = void 0; } - const include2 = []; + const 
include = []; if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSystem) { - include2.push("system"); + include.push("system"); } - const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const listSegmentOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; const iter = this.listItems(listSegmentOptions); return { /** @@ -57755,7 +61564,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -57765,16 +61577,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key * * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ - async getUserDelegationKey(startsOn, expiresOn2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { - const response = assertResponse(await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn2, false) + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ + startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), + expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) }, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions @@ -57788,19 +61600,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; signedVersion: response.signedVersion, value: response.value }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + const res = { + _response: response._response, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + version: response.version, + date: response.date, + errorCode: response.errorCode, + ...userDelegationKey + }; return res; }); } /** * Creates a BlobBatchClient object to conduct batch operations. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this service. 
*/ getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } /** * Only available for BlobServiceClient constructed with a shared key credential. @@ -57808,7 +61628,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. @@ -57816,21 +61636,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (expiresOn2 === void 0) { + if (expiresOn === void 0) { const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + expiresOn = new Date(now.getTime() + 3600 * 1e3); } - const sas = generateAccountSASQueryParameters(Object.assign({ + const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ permissions, - expiresOn: expiresOn2, + expiresOn, resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).toString(); - return appendToURLQuery(this.url, sas); + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).toString(); + return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); } /** * Only available for BlobServiceClient constructed with a shared key credential. @@ -57838,7 +61659,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. @@ -57846,66 +61667,124 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - generateSasStringToSign(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (expiresOn2 === void 0) { + if (expiresOn === void 0) { const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + expiresOn = new Date(now.getTime() + 3600 * 1e3); } - return generateAccountSASQueryParametersInternal(Object.assign({ + return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ permissions, - expiresOn: expiresOn2, + expiresOn, resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).stringToSign; + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).stringToSign; } }; - exports2.KnownEncryptionAlgorithmType = void 0; - (function(KnownEncryptionAlgorithmType) { - KnownEncryptionAlgorithmType["AES256"] = "AES256"; - })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); - Object.defineProperty(exports2, "RestError", { - enumerable: true, - get: function() { - return coreRestPipeline.RestError; - } - }); - exports2.AccountSASPermissions = AccountSASPermissions; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; - exports2.AccountSASServices = AccountSASServices; - exports2.AnonymousCredential = AnonymousCredential; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; - exports2.AppendBlobClient = AppendBlobClient; - exports2.BaseRequestPolicy = BaseRequestPolicy; - exports2.BlobBatch = BlobBatch; - exports2.BlobBatchClient = BlobBatchClient; - exports2.BlobClient = BlobClient; - exports2.BlobLeaseClient = BlobLeaseClient; - exports2.BlobSASPermissions = BlobSASPermissions; exports2.BlobServiceClient = BlobServiceClient; - exports2.BlockBlobClient = BlockBlobClient; - exports2.ContainerClient = ContainerClient; - exports2.ContainerSASPermissions = ContainerSASPermissions; - exports2.Credential = Credential; - exports2.CredentialPolicy = CredentialPolicy; - exports2.PageBlobClient = PageBlobClient; - exports2.Pipeline = Pipeline; - exports2.SASQueryParameters = SASQueryParameters; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; - exports2.StorageOAuthScopes = StorageOAuthScopes; - exports2.StorageRetryPolicy = StorageRetryPolicy; - exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; - exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; - exports2.isPipelineLike = isPipelineLike; - exports2.logger = logger; - exports2.newPipeline = newPipeline; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js +var require_BatchResponse = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js +var require_generatedModels = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) { + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/index.js +var require_commonjs13 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var core_rest_pipeline_1 = require_commonjs6(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return core_rest_pipeline_1.RestError; + } }); + tslib_1.__exportStar(require_BlobServiceClient(), exports2); + tslib_1.__exportStar(require_Clients(), exports2); + tslib_1.__exportStar(require_ContainerClient(), exports2); + tslib_1.__exportStar(require_BlobLeaseClient(), exports2); + tslib_1.__exportStar(require_AccountSASPermissions(), exports2); + tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); + tslib_1.__exportStar(require_AccountSASServices(), exports2); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { + return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; + } }); + tslib_1.__exportStar(require_BlobBatch(), exports2); + tslib_1.__exportStar(require_BlobBatchClient(), exports2); + tslib_1.__exportStar(require_BatchResponse(), exports2); + tslib_1.__exportStar(require_BlobSASPermissions(), exports2); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { + return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); + tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); + tslib_1.__exportStar(require_AnonymousCredential(), exports2); + tslib_1.__exportStar(require_Credential(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); + var models_js_1 = require_models2(); + Object.defineProperty(exports2, "BlockBlobTier", { enumerable: true, get: function() { + return models_js_1.BlockBlobTier; + } }); + Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { + return models_js_1.PremiumPageBlobTier; + } }); + 
Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { + return models_js_1.StorageBlobAudience; + } }); + Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { + return models_js_1.getBlobServiceAccountAudience; + } }); + var Pipeline_js_1 = require_Pipeline(); + Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { + return Pipeline_js_1.Pipeline; + } }); + Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { + return Pipeline_js_1.isPipelineLike; + } }); + Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { + return Pipeline_js_1.newPipeline; + } }); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return Pipeline_js_1.StorageOAuthScopes; + } }); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); + tslib_1.__exportStar(require_CredentialPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); + tslib_1.__exportStar(require_SASQueryParameters(), exports2); + tslib_1.__exportStar(require_generatedModels(), exports2); + var log_js_1 = require_log5(); + Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { + return log_js_1.logger; + } }); } }); @@ -58047,7 +61926,7 @@ var require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; var core8 = __importStar2(require_core()); - var storage_blob_1 = require_dist4(); + var storage_blob_1 = require_commonjs13(); var errors_1 = require_errors2(); var UploadProgress = class { constructor(contentLength) { @@ -58330,7 +62209,7 @@ var require_requestUtils = __commonJS({ }); // ../../node_modules/@actions/cache/node_modules/@azure/abort-controller/dist/index.js -var require_dist5 = __commonJS({ +var require_dist4 = __commonJS({ "../../node_modules/@actions/cache/node_modules/@azure/abort-controller/dist/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -58535,18 +62414,18 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; var core8 = __importStar2(require_core()); var http_client_1 = require_lib(); - var storage_blob_1 = require_dist4(); + var storage_blob_1 = require_commonjs13(); var buffer = __importStar2(require("buffer")); var fs5 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants6(); var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist5(); + var abort_controller_1 = require_dist4(); function pipeResponseToStream(response, output) { return __awaiter2(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream2.pipeline); + const pipeline = 
util.promisify(stream.pipeline); yield pipeline(response.message, output); }); } @@ -62201,7 +66080,7 @@ var require_enum_object = __commonJS({ }); // ../../node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs12 = __commonJS({ +var require_commonjs14 = __commonJS({ "../../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -62392,7 +66271,7 @@ var require_reflection_info2 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function normalizeMethodInfo(method, service) { var _a4, _b, _c; let m = method; @@ -62501,7 +66380,7 @@ var require_rpc_options = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function mergeRpcOptions(defaults, options) { if (!options) return defaults; @@ -62638,7 +66517,7 @@ var require_rpc_output_stream = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RpcOutputStreamController = void 0; var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); var RpcOutputStreamController = class { constructor() { this._lis = { @@ -63093,7 +66972,7 @@ var require_test_transport = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.TestTransport = void 0; var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); var rpc_output_stream_1 = require_rpc_output_stream(); var rpc_options_1 = require_rpc_options(); var unary_call_1 = require_unary_call(); @@ -63166,7 +67045,7 @@ var require_test_transport = __commonJS({ * The returned promise resolves when the stream is closed. It should * not reject. If it does, code is broken. */ - streamResponses(method, stream2, abort) { + streamResponses(method, stream, abort) { return __awaiter2(this, void 0, void 0, function* () { const messages = []; if (this.data.response === void 0) { @@ -63183,31 +67062,31 @@ var require_test_transport = __commonJS({ try { yield delay(this.responseDelay, abort)(void 0); } catch (error2) { - stream2.notifyError(error2); + stream.notifyError(error2); return; } if (this.data.response instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.response); + stream.notifyError(this.data.response); return; } for (let msg of messages) { - stream2.notifyMessage(msg); + stream.notifyMessage(msg); try { yield delay(this.betweenResponseDelay, abort)(void 0); } catch (error2) { - stream2.notifyError(error2); + stream.notifyError(error2); return; } } if (this.data.status instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.status); + stream.notifyError(this.data.status); return; } if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream2.notifyError(this.data.trailers); + stream.notifyError(this.data.trailers); return; } - stream2.notifyComplete(); + stream.notifyComplete(); }); } // Creates a promise for response status from the mock data. 
@@ -63337,7 +67216,7 @@ var require_rpc_interceptor = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function stackIntercept(kind, transport, method, options, input) { var _a4, _b, _c, _d; if (kind == "unary") { @@ -63461,7 +67340,7 @@ var require_server_call_context = __commonJS({ }); // ../../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs13 = __commonJS({ +var require_commonjs15 = __commonJS({ "../../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -63547,11 +67426,11 @@ var require_cachescope = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheScope = void 0; - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var CacheScope$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.entities.v1.CacheScope", [ @@ -63623,11 +67502,11 @@ var require_cachemetadata = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheMetadata = void 0; - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var cachescope_1 = require_cachescope(); var CacheMetadata$Type = class extends runtime_5.MessageType { constructor() { @@ -63689,17 +67568,17 @@ var require_cachemetadata = __commonJS({ }); // ../../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js -var require_cache2 = __commonJS({ +var require_cache3 = __commonJS({ "../../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; - var runtime_rpc_1 = require_commonjs13(); - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_rpc_1 = require_commonjs15(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var cachemetadata_1 = require_cachemetadata(); var CreateCacheEntryRequest$Type = 
class extends runtime_5.MessageType { constructor() { @@ -64148,7 +68027,7 @@ var require_cache_twirp_client = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; - var cache_1 = require_cache2(); + var cache_1 = require_cache3(); var CacheServiceClientJSON = class { constructor(rpc) { this.rpc = rpc; @@ -64674,7 +68553,7 @@ var require_tar = __commonJS({ }); // ../../node_modules/@actions/cache/lib/cache.js -var require_cache3 = __commonJS({ +var require_cache4 = __commonJS({ "../../node_modules/@actions/cache/lib/cache.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { @@ -65120,7 +68999,7 @@ var require_context = __commonJS({ }); // ../../node_modules/@actions/github/lib/internal/utils.js -var require_utils4 = __commonJS({ +var require_utils5 = __commonJS({ "../../node_modules/@actions/github/lib/internal/utils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { @@ -69026,7 +72905,7 @@ var require_dist_node10 = __commonJS({ }); // ../../node_modules/@actions/github/lib/utils.js -var require_utils5 = __commonJS({ +var require_utils6 = __commonJS({ "../../node_modules/@actions/github/lib/utils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { @@ -69059,7 +72938,7 @@ var require_utils5 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOctokitOptions = exports2.GitHub = exports2.defaults = exports2.context = void 0; var Context = __importStar2(require_context()); - var Utils = __importStar2(require_utils4()); + var Utils = __importStar2(require_utils5()); var core_1 = require_dist_node8(); var plugin_rest_endpoint_methods_1 = require_dist_node9(); var plugin_paginate_rest_1 = require_dist_node10(); @@ -69119,7 +72998,7 @@ var require_github = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOctokit = exports2.context = void 0; var Context = __importStar2(require_context()); - var utils_1 = require_utils5(); + var utils_1 = require_utils6(); exports2.context = new Context.Context(); function getOctokit4(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); @@ -88997,7 +92876,7 @@ ${ctx.indent}`; }); // ../../node_modules/yaml/dist/log.js -var require_log5 = __commonJS({ +var require_log7 = __commonJS({ "../../node_modules/yaml/dist/log.js"(exports2) { "use strict"; var node_process = require("process"); @@ -89079,7 +92958,7 @@ var require_merge = __commonJS({ var require_addPairToJSMap = __commonJS({ "../../node_modules/yaml/dist/nodes/addPairToJSMap.js"(exports2) { "use strict"; - var log = require_log5(); + var log = require_log7(); var merge2 = require_merge(); var stringify2 = require_stringify(); var identity = require_identity(); @@ -94512,7 +98391,7 @@ var require_public_api = __commonJS({ var composer = require_composer(); var Document2 = require_Document(); var errors2 = require_errors3(); - var log = require_log5(); + var log = require_log7(); var identity = require_identity(); var lineCounter = require_line_counter(); var parser = require_parser(); @@ -94603,7 +98482,7 @@ var require_public_api = __commonJS({ }); // ../../node_modules/yaml/dist/index.js -var require_dist6 = 
__commonJS({ +var require_dist5 = __commonJS({ "../../node_modules/yaml/dist/index.js"(exports2) { "use strict"; var composer = require_composer(); @@ -96124,7 +100003,7 @@ var require_tool_cache = __commonJS({ var path6 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver4 = __importStar2(require_semver2()); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var assert_1 = require("assert"); var exec_1 = require_exec(); @@ -96184,7 +100063,7 @@ var require_tool_cache = __commonJS({ core8.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); const readStream = responseMessageFactory(); let succeeded = false; @@ -97919,7 +101798,7 @@ var require_light = __commonJS({ }); // ../../node_modules/semver/internal/constants.js -var require_constants10 = __commonJS({ +var require_constants12 = __commonJS({ "../../node_modules/semver/internal/constants.js"(exports2, module2) { "use strict"; var SEMVER_SPEC_VERSION = "2.0.0"; @@ -97968,7 +101847,7 @@ var require_re = __commonJS({ MAX_SAFE_COMPONENT_LENGTH, MAX_SAFE_BUILD_LENGTH, MAX_LENGTH - } = require_constants10(); + } = require_constants12(); var debug2 = require_debug2(); exports2 = module2.exports = {}; var re = exports2.re = []; @@ -98094,7 +101973,7 @@ var require_semver3 = __commonJS({ "../../node_modules/semver/classes/semver.js"(exports2, module2) { "use strict"; var debug2 = require_debug2(); - var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants10(); + var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants12(); var { safeRe: re, t } = require_re(); var parseOptions = require_parse_options(); var { compareIdentifiers } = require_identifiers(); @@ -98924,7 +102803,7 @@ var require_range = __commonJS({ tildeTrimReplace, caretTrimReplace } = require_re(); - var { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require_constants10(); + var { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require_constants12(); var isNullSet = (c) => c.value === "<0.0.0-0"; var isAny = (c) => c.value === ""; var isSatisfiable = (comparators, options) => { @@ -99736,7 +103615,7 @@ var require_semver4 = __commonJS({ "../../node_modules/semver/index.js"(exports2, module2) { "use strict"; var internalRe = require_re(); - var constants3 = require_constants10(); + var constants3 = require_constants12(); var SemVer = require_semver3(); var identifiers = require_identifiers(); var parse10 = require_parse2(); @@ -100346,7 +104225,7 @@ var require_stringify2 = __commonJS({ }); // ../../node_modules/css-what/lib/commonjs/index.js -var require_commonjs14 = __commonJS({ +var require_commonjs16 = __commonJS({ "../../node_modules/css-what/lib/commonjs/index.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { @@ -104878,7 +108757,7 @@ var require_errors4 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/core/constants.js -var require_constants11 = __commonJS({ +var require_constants13 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/core/constants.js"(exports2, module2) { "use strict"; var wellknownHeaderNames = ( @@ -105012,7 +108891,7 @@ var require_tree = __commonJS({ var { wellknownHeaderNames, headerNameLowerCasedRecord - } = require_constants11(); + } = require_constants13(); var TstNode = class _TstNode { /** @type {any} */ value = null; @@ -105154,14 +109033,14 @@ var require_util11 = __commonJS({ var assert = require("node:assert"); var { kDestroyed, kBodyUsed, kListeners, kBody } = require_symbols6(); var { IncomingMessage } = require("node:http"); - var stream2 = require("node:stream"); + var stream = require("node:stream"); var net = require("node:net"); var { Blob: Blob2 } = require("node:buffer"); var { stringify: stringify2 } = require("node:querystring"); var { EventEmitter: EE } = require("node:events"); var timers = require_timers2(); var { InvalidArgumentError, ConnectTimeoutError } = require_errors4(); - var { headerNameLowerCasedRecord } = require_constants11(); + var { headerNameLowerCasedRecord } = require_constants13(); var { tree } = require_tree(); var [nodeMajor, nodeMinor] = process.versions.node.split(".", 2).map((v) => Number(v)); var BodyAsyncIterable = class { @@ -105328,24 +109207,24 @@ var require_util11 = __commonJS({ return null; } function isDestroyed(body) { - return body && !!(body.destroyed || body[kDestroyed] || stream2.isDestroyed?.(body)); + return body && !!(body.destroyed || body[kDestroyed] || stream.isDestroyed?.(body)); } - function destroy(stream3, err) { - if (stream3 == null || !isStream(stream3) || isDestroyed(stream3)) { + function destroy(stream2, err) { + if (stream2 == null || !isStream(stream2) || isDestroyed(stream2)) { return; } - if (typeof stream3.destroy === "function") { - if (Object.getPrototypeOf(stream3).constructor === IncomingMessage) { - stream3.socket = null; + if (typeof stream2.destroy === "function") { + if (Object.getPrototypeOf(stream2).constructor === IncomingMessage) { + stream2.socket = null; } - stream3.destroy(err); + stream2.destroy(err); } else if (err) { queueMicrotask(() => { - stream3.emit("error", err); + stream2.emit("error", err); }); } - if (stream3.destroyed !== true) { - stream3[kDestroyed] = true; + if (stream2.destroyed !== true) { + stream2[kDestroyed] = true; } } var KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/; @@ -105453,7 +109332,7 @@ var require_util11 = __commonJS({ } } function isDisturbed(body) { - return !!(body && (stream2.isDisturbed(body) || body[kBodyUsed])); + return !!(body && (stream.isDisturbed(body) || body[kBodyUsed])); } function getSocketInfo(socket) { return { @@ -105956,7 +109835,7 @@ var require_request3 = __commonJS({ normalizedMethodRecords } = require_util11(); var { channels } = require_diagnostics(); - var { headerNameLowerCasedRecord } = require_constants11(); + var { headerNameLowerCasedRecord } = require_constants13(); var invalidPathRegex = /[^\u0021-\u00ff]/; var kHandler = Symbol("handler"); var Request = class { @@ -106364,9 +110243,9 @@ var require_dispatcher2 = __commonJS({ throw new Error("not implemented"); } compose(...args) { - const interceptors = Array.isArray(args[0]) ? args[0] : args; + const interceptors2 = Array.isArray(args[0]) ? 
args[0] : args; let dispatch = this.dispatch.bind(this); - for (const interceptor of interceptors) { + for (const interceptor of interceptors2) { if (interceptor == null) { continue; } @@ -106714,7 +110593,7 @@ var require_connect2 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/llhttp/utils.js -var require_utils6 = __commonJS({ +var require_utils7 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/llhttp/utils.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -106732,12 +110611,12 @@ var require_utils6 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/llhttp/constants.js -var require_constants12 = __commonJS({ +var require_constants14 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/llhttp/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SPECIAL_HEADERS = exports2.MINOR = exports2.MAJOR = exports2.HTAB_SP_VCHAR_OBS_TEXT = exports2.QUOTED_STRING = exports2.CONNECTION_TOKEN_CHARS = exports2.HEADER_CHARS = exports2.TOKEN = exports2.HEX = exports2.URL_CHAR = exports2.USERINFO_CHARS = exports2.MARK = exports2.ALPHANUM = exports2.NUM = exports2.HEX_MAP = exports2.NUM_MAP = exports2.ALPHA = exports2.STATUSES_HTTP = exports2.H_METHOD_MAP = exports2.METHOD_MAP = exports2.METHODS_RTSP = exports2.METHODS_ICE = exports2.METHODS_HTTP = exports2.HEADER_STATE = exports2.FINISH = exports2.STATUSES = exports2.METHODS = exports2.LENIENT_FLAGS = exports2.FLAGS = exports2.TYPE = exports2.ERROR = void 0; - var utils_1 = require_utils6(); + var utils_1 = require_utils7(); exports2.ERROR = { OK: 0, INTERNAL: 1, @@ -107351,7 +111230,7 @@ var require_llhttp_simd_wasm2 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/web/fetch/constants.js -var require_constants13 = __commonJS({ +var require_constants15 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/fetch/constants.js"(exports2, module2) { "use strict"; var corsSafeListedMethods = ( @@ -108442,7 +112321,7 @@ var require_util12 = __commonJS({ "use strict"; var { Transform } = require("node:stream"); var zlib = require("node:zlib"); - var { redirectStatusSet, referrerPolicyTokens, badPortsSet } = require_constants13(); + var { redirectStatusSet, referrerPolicyTokens, badPortsSet } = require_constants15(); var { getGlobalOrigin } = require_global3(); var { collectASequenceOfCodePoints, collectAnHTTPQuotedString, removeChars, parseMIMEType } = require_data_url(); var { performance: performance2 } = require("node:perf_hooks"); @@ -109807,19 +113686,19 @@ var require_body2 = __commonJS({ function noop() { } var streamRegistry = new FinalizationRegistry((weakRef) => { - const stream2 = weakRef.deref(); - if (stream2 && !stream2.locked && !isDisturbed(stream2) && !isErrored(stream2)) { - stream2.cancel("Response object has been garbage collected").catch(noop); + const stream = weakRef.deref(); + if (stream && !stream.locked && !isDisturbed(stream) && !isErrored(stream)) { + stream.cancel("Response object has been garbage collected").catch(noop); } }); function extractBody(object, keepalive = false) { - let stream2 = null; + let stream = null; if (webidl.is.ReadableStream(object)) { - stream2 = object; + stream = object; } else if (webidl.is.Blob(object)) { - stream2 = object.stream(); + stream = object.stream(); } else { - stream2 = new ReadableStream({ + stream = new ReadableStream({ async pull(controller) { const buffer = typeof 
source === "string" ? textEncoder.encode(source) : source; if (buffer.byteLength) { @@ -109832,7 +113711,7 @@ var require_body2 = __commonJS({ type: "bytes" }); } - assert(webidl.is.ReadableStream(stream2)); + assert(webidl.is.ReadableStream(stream)); let action = null; let source = null; let length = null; @@ -109911,14 +113790,14 @@ Content-Type: ${value.type || "application/octet-stream"}\r "Response body object should not be disturbed or locked" ); } - stream2 = webidl.is.ReadableStream(object) ? object : ReadableStreamFrom(object); + stream = webidl.is.ReadableStream(object) ? object : ReadableStreamFrom(object); } if (typeof source === "string" || util.isBuffer(source)) { length = Buffer.byteLength(source); } if (action != null) { let iterator; - stream2 = new ReadableStream({ + stream = new ReadableStream({ async start() { iterator = action(object)[Symbol.asyncIterator](); }, @@ -109930,7 +113809,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r controller.byobRequest?.respond(0); }); } else { - if (!isErrored(stream2)) { + if (!isErrored(stream)) { const buffer = new Uint8Array(value); if (buffer.byteLength) { controller.enqueue(buffer); @@ -109945,7 +113824,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r type: "bytes" }); } - const body = { stream: stream2, source, length }; + const body = { stream, source, length }; return [body, type]; } function safelyExtractBody(object, keepalive = false) { @@ -110133,7 +114012,7 @@ var require_client_h1 = __commonJS({ kHTTPContext, kClosed } = require_symbols6(); - var constants3 = require_constants12(); + var constants3 = require_constants14(); var EMPTY_BUF = Buffer.alloc(0); var FastBuffer = Buffer[Symbol.species]; var removeAllListeners = util.removeAllListeners; @@ -111465,7 +115344,7 @@ var require_client_h2 = __commonJS({ headers[key] = val2; } } - let stream2 = null; + let stream = null; const { hostname, port } = client[kUrl]; headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? 
`:${port}` : ""}`; headers[HTTP2_HEADER_METHOD] = method; @@ -111475,9 +115354,9 @@ var require_client_h2 = __commonJS({ } err = err || new RequestAbortedError(); util.errorRequest(client, request, err); - if (stream2 != null) { - stream2.removeAllListeners("data"); - stream2.close(); + if (stream != null) { + stream.removeAllListeners("data"); + stream.close(); client[kOnError](err); client[kResume](); } @@ -111493,23 +115372,23 @@ var require_client_h2 = __commonJS({ } if (method === "CONNECT") { session.ref(); - stream2 = session.request(headers, { endStream: false, signal }); - if (!stream2.pending) { - request.onUpgrade(null, null, stream2); + stream = session.request(headers, { endStream: false, signal }); + if (!stream.pending) { + request.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; } else { - stream2.once("ready", () => { - request.onUpgrade(null, null, stream2); + stream.once("ready", () => { + request.onUpgrade(null, null, stream); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; }); } - stream2.once("close", () => { + stream.once("close", () => { session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) session.unref(); }); - stream2.setTimeout(requestTimeout); + stream.setTimeout(requestTimeout); return true; } headers[HTTP2_HEADER_PATH] = path6; @@ -111555,36 +115434,36 @@ var require_client_h2 = __commonJS({ const shouldEndStream = method === "GET" || method === "HEAD" || body === null; if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = "100-continue"; - stream2 = session.request(headers, { endStream: shouldEndStream, signal }); - stream2.once("continue", writeBodyH2); + stream = session.request(headers, { endStream: shouldEndStream, signal }); + stream.once("continue", writeBodyH2); } else { - stream2 = session.request(headers, { + stream = session.request(headers, { endStream: shouldEndStream, signal }); writeBodyH2(); } ++session[kOpenStreams]; - stream2.setTimeout(requestTimeout); - stream2.once("response", (headers2) => { + stream.setTimeout(requestTimeout); + stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; request.onResponseStarted(); if (request.aborted) { - stream2.removeAllListeners("data"); + stream.removeAllListeners("data"); return; } - if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream2.resume.bind(stream2), "") === false) { - stream2.pause(); + if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), "") === false) { + stream.pause(); } }); - stream2.on("data", (chunk) => { + stream.on("data", (chunk) => { if (request.onData(chunk) === false) { - stream2.pause(); + stream.pause(); } }); - stream2.once("end", (err) => { - stream2.removeAllListeners("data"); - if (stream2.state?.state == null || stream2.state.state < 6) { + stream.once("end", (err) => { + stream.removeAllListeners("data"); + if (stream.state?.state == null || stream.state.state < 6) { if (!request.aborted && !request.completed) { request.onComplete({}); } @@ -111601,34 +115480,34 @@ var require_client_h2 = __commonJS({ client[kResume](); } }); - stream2.once("close", () => { - stream2.removeAllListeners("data"); + stream.once("close", () => { + stream.removeAllListeners("data"); session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) { session.unref(); } }); - stream2.once("error", function(err) { - stream2.removeAllListeners("data"); + stream.once("error", function(err) { + 
stream.removeAllListeners("data"); abort(err); }); - stream2.once("frameError", (type, code) => { - stream2.removeAllListeners("data"); + stream.once("frameError", (type, code) => { + stream.removeAllListeners("data"); abort(new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)); }); - stream2.on("aborted", () => { - stream2.removeAllListeners("data"); + stream.on("aborted", () => { + stream.removeAllListeners("data"); }); - stream2.on("timeout", () => { + stream.on("timeout", () => { const err = new InformationalError(`HTTP/2: "stream timeout after ${requestTimeout}"`); - stream2.removeAllListeners("data"); + stream.removeAllListeners("data"); session[kOpenStreams] -= 1; if (session[kOpenStreams] === 0) { session.unref(); } abort(err); }); - stream2.once("trailers", (trailers) => { + stream.once("trailers", (trailers) => { if (request.aborted || request.completed) { return; } @@ -111639,7 +115518,7 @@ var require_client_h2 = __commonJS({ if (!body || contentLength === 0) { writeBuffer( abort, - stream2, + stream, null, client, request, @@ -111650,7 +115529,7 @@ var require_client_h2 = __commonJS({ } else if (util.isBuffer(body)) { writeBuffer( abort, - stream2, + stream, body, client, request, @@ -111662,7 +115541,7 @@ var require_client_h2 = __commonJS({ if (typeof body.stream === "function") { writeIterable( abort, - stream2, + stream, body.stream(), client, request, @@ -111673,7 +115552,7 @@ var require_client_h2 = __commonJS({ } else { writeBlob( abort, - stream2, + stream, body, client, request, @@ -111687,7 +115566,7 @@ var require_client_h2 = __commonJS({ abort, client[kSocket], expectsPayload, - stream2, + stream, body, client, request, @@ -111696,7 +115575,7 @@ var require_client_h2 = __commonJS({ } else if (util.isIterable(body)) { writeIterable( abort, - stream2, + stream, body, client, request, @@ -111888,7 +115767,7 @@ var require_client2 = __commonJS({ function getPipelining(client) { return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1; } - var Client = class extends DispatcherBase { + var Client2 = class extends DispatcherBase { /** * * @param {string|URL} url @@ -112305,7 +116184,7 @@ var require_client2 = __commonJS({ } } } - module2.exports = Client; + module2.exports = Client2; } }); @@ -112555,7 +116434,7 @@ var require_pool2 = __commonJS({ kGetDispatcher, kRemoveClient } = require_pool_base2(); - var Client = require_client2(); + var Client2 = require_client2(); var { InvalidArgumentError } = require_errors4(); @@ -112566,7 +116445,7 @@ var require_pool2 = __commonJS({ var kConnections = Symbol("connections"); var kFactory = Symbol("factory"); function defaultFactory(origin, opts) { - return new Client(origin, opts); + return new Client2(origin, opts); } var Pool = class extends PoolBase { constructor(origin, { @@ -112796,7 +116675,7 @@ var require_agent2 = __commonJS({ var { kClients, kRunning, kClose, kDestroy, kDispatch, kUrl } = require_symbols6(); var DispatcherBase = require_dispatcher_base2(); var Pool = require_pool2(); - var Client = require_client2(); + var Client2 = require_client2(); var util = require_util11(); var kOnConnect = Symbol("onConnect"); var kOnDisconnect = Symbol("onDisconnect"); @@ -112805,7 +116684,7 @@ var require_agent2 = __commonJS({ var kFactory = Symbol("factory"); var kOptions = Symbol("options"); function defaultFactory(origin, opts) { - return opts && opts.connections === 1 ? new Client(origin, opts) : new Pool(origin, opts); + return opts && opts.connections === 1 ? 
new Client2(origin, opts) : new Pool(origin, opts); } var Agent = class extends DispatcherBase { constructor({ factory = defaultFactory, connect, ...options } = {}) { @@ -112910,7 +116789,7 @@ var require_proxy_agent2 = __commonJS({ var DispatcherBase = require_dispatcher_base2(); var { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require_errors4(); var buildConnector = require_connect2(); - var Client = require_client2(); + var Client2 = require_client2(); var kAgent = Symbol("proxy agent"); var kClient = Symbol("proxy client"); var kProxyHeaders = Symbol("proxy headers"); @@ -112936,7 +116815,7 @@ var require_proxy_agent2 = __commonJS({ throw new InvalidArgumentError("ProxyClient only supports http and https protocols"); } super(); - this.#client = new Client(origin, opts); + this.#client = new Client2(origin, opts); } async [kClose]() { await this.#client.close(); @@ -113005,7 +116884,7 @@ var require_proxy_agent2 = __commonJS({ if (origin2.protocol === "http:") { return new ProxyClient(origin2, options); } - return new Client(origin2, options); + return new Client2(origin2, options); } : void 0; const connect = buildConnector({ ...opts.proxyTls }); this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }); @@ -113610,7 +117489,7 @@ var require_h2c_client = __commonJS({ var { kClose, kDestroy } = require_symbols6(); var { InvalidArgumentError } = require_errors4(); var util = require_util11(); - var Client = require_client2(); + var Client2 = require_client2(); var DispatcherBase = require_dispatcher_base2(); var H2CClient = class extends DispatcherBase { #client = null; @@ -113638,7 +117517,7 @@ var require_h2c_client = __commonJS({ "h2c-client: pipelining cannot be greater than maxConcurrentStreams" ); } - this.#client = new Client(origin, { + this.#client = new Client2(origin, { ...opts, connect: this.#buildConnector(connect2), maxConcurrentStreams: defaultMaxConcurrentStreams, @@ -113959,13 +117838,13 @@ var require_readable2 = __commonJS({ function isUnusable(bodyReadable) { return util.isDisturbed(bodyReadable) || isLocked(bodyReadable); } - function consume(stream2, type) { - assert(!stream2[kConsume]); + function consume(stream, type) { + assert(!stream[kConsume]); return new Promise((resolve, reject) => { - if (isUnusable(stream2)) { - const rState = stream2._readableState; + if (isUnusable(stream)) { + const rState = stream._readableState; if (rState.destroyed && rState.closeEmitted === false) { - stream2.on("error", (err) => { + stream.on("error", (err) => { reject(err); }).on("close", () => { reject(new TypeError("unusable")); @@ -113975,22 +117854,22 @@ var require_readable2 = __commonJS({ } } else { queueMicrotask(() => { - stream2[kConsume] = { + stream[kConsume] = { type, - stream: stream2, + stream, resolve, reject, length: 0, body: [] }; - stream2.on("error", function(err) { + stream.on("error", function(err) { consumeFinish(this[kConsume], err); }).on("close", function() { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()); } }); - consumeStart(stream2[kConsume]); + consumeStart(stream[kConsume]); }); } }); @@ -114052,7 +117931,7 @@ var require_readable2 = __commonJS({ return buffer; } function consumeEnd(consume2, encoding) { - const { type, body, resolve, stream: stream2, length } = consume2; + const { type, body, resolve, stream, length } = consume2; try { if (type === "text") { resolve(chunksDecode(body, length, encoding)); @@ -114061,13 +117940,13 @@ var require_readable2 = __commonJS({ 
} else if (type === "arrayBuffer") { resolve(chunksConcat(body, length).buffer); } else if (type === "blob") { - resolve(new Blob(body, { type: stream2[kContentType] })); + resolve(new Blob(body, { type: stream[kContentType] })); } else if (type === "bytes") { resolve(chunksConcat(body, length)); } consumeFinish(consume2); } catch (err) { - stream2.destroy(err); + stream.destroy(err); } } function consumePush(consume2, chunk) { @@ -114456,10 +118335,10 @@ var require_api_stream2 = __commonJS({ } } }; - function stream2(opts, factory, callback) { + function stream(opts, factory, callback) { if (callback === void 0) { return new Promise((resolve, reject) => { - stream2.call(this, opts, factory, (err, data2) => { + stream.call(this, opts, factory, (err, data2) => { return err ? reject(err) : resolve(data2); }); }); @@ -114475,7 +118354,7 @@ var require_api_stream2 = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = stream2; + module2.exports = stream; } }); @@ -115442,7 +119321,7 @@ var require_mock_client2 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/mock/mock-client.js"(exports2, module2) { "use strict"; var { promisify } = require("node:util"); - var Client = require_client2(); + var Client2 = require_client2(); var { buildMockDispatch } = require_mock_utils2(); var { kDispatches, @@ -115457,7 +119336,7 @@ var require_mock_client2 = __commonJS({ var { MockInterceptor } = require_mock_interceptor2(); var Symbols = require_symbols6(); var { InvalidArgumentError } = require_errors4(); - var MockClient = class extends Client { + var MockClient = class extends Client2 { constructor(origin, opts) { if (!opts || !opts.agent || typeof opts.agent.dispatch !== "function") { throw new InvalidArgumentError("Argument opts.agent must implement Agent"); @@ -116770,7 +120649,7 @@ var require_dns = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/util/cache.js -var require_cache4 = __commonJS({ +var require_cache5 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/util/cache.js"(exports2, module2) { "use strict"; var { @@ -117212,7 +121091,7 @@ var require_cache_handler = __commonJS({ parseCacheControlHeader, parseVaryHeader, isEtagUsable - } = require_cache4(); + } = require_cache5(); var { parseHttpDate } = require_date(); function noop() { } @@ -117510,7 +121389,7 @@ var require_memory_cache_store = __commonJS({ "use strict"; var { Writable } = require("node:stream"); var { EventEmitter } = require("node:events"); - var { assertCacheKey, assertCacheValue } = require_cache4(); + var { assertCacheKey, assertCacheValue } = require_cache5(); var MemoryCacheStore = class extends EventEmitter { #maxCount = 1024; #maxSize = 104857600; @@ -117769,7 +121648,7 @@ var require_cache_revalidation_handler = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/interceptor/cache.js -var require_cache5 = __commonJS({ +var require_cache6 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/interceptor/cache.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); @@ -117778,7 +121657,7 @@ var require_cache5 = __commonJS({ var CacheHandler = require_cache_handler(); var MemoryCacheStore = require_memory_cache_store(); var CacheRevalidationHandler = require_cache_revalidation_handler(); - var { assertCacheStore, assertCacheMethods, makeCacheKey, normaliseHeaders, parseCacheControlHeader } = require_cache4(); + var { assertCacheStore, assertCacheMethods, makeCacheKey, 
normaliseHeaders, parseCacheControlHeader } = require_cache5(); var { AbortError } = require_errors4(); function needsRevalidation(result2, cacheControlDirectives) { if (cacheControlDirectives?.["no-cache"]) { @@ -117834,30 +121713,30 @@ var require_cache5 = __commonJS({ return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler)); } function sendCachedValue(handler, opts, result2, age, context3, isStale) { - const stream2 = util.isStream(result2.body) ? result2.body : Readable.from(result2.body ?? []); - assert(!stream2.destroyed, "stream should not be destroyed"); - assert(!stream2.readableDidRead, "stream should not be readableDidRead"); + const stream = util.isStream(result2.body) ? result2.body : Readable.from(result2.body ?? []); + assert(!stream.destroyed, "stream should not be destroyed"); + assert(!stream.readableDidRead, "stream should not be readableDidRead"); const controller = { resume() { - stream2.resume(); + stream.resume(); }, pause() { - stream2.pause(); + stream.pause(); }, get paused() { - return stream2.isPaused(); + return stream.isPaused(); }, get aborted() { - return stream2.destroyed; + return stream.destroyed; }, get reason() { - return stream2.errored; + return stream.errored; }, abort(reason) { - stream2.destroy(reason ?? new AbortError()); + stream.destroy(reason ?? new AbortError()); } }; - stream2.on("error", function(err) { + stream.on("error", function(err) { if (!this.readableEnded) { if (typeof handler.onResponseError === "function") { handler.onResponseError(controller, err); @@ -117871,7 +121750,7 @@ var require_cache5 = __commonJS({ } }); handler.onRequestStart?.(controller, context3); - if (stream2.destroyed) { + if (stream.destroyed) { return; } const headers = { ...result2.headers, age: String(age) }; @@ -117880,9 +121759,9 @@ var require_cache5 = __commonJS({ } handler.onResponseStart?.(controller, result2.statusCode, headers, result2.statusMessage); if (opts.method === "HEAD") { - stream2.destroy(); + stream.destroy(); } else { - stream2.on("data", function(chunk) { + stream.on("data", function(chunk) { handler.onResponseData?.(controller, chunk); }); } @@ -118021,7 +121900,7 @@ var require_sqlite_cache_store = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/cache/sqlite-cache-store.js"(exports2, module2) { "use strict"; var { Writable } = require("node:stream"); - var { assertCacheKey, assertCacheValue } = require_cache4(); + var { assertCacheKey, assertCacheValue } = require_cache5(); var DatabaseSync; var VERSION2 = 3; var MAX_ENTRY_SIZE = 2 * 1e3 * 1e3 * 1e3; @@ -118857,7 +122736,7 @@ var require_response3 = __commonJS({ var { redirectStatusSet, nullBodyStatus - } = require_constants13(); + } = require_constants15(); var { webidl } = require_webidl2(); var { URLSerializer } = require_data_url(); var { kConstruct } = require_symbols6(); @@ -119280,7 +123159,7 @@ var require_request4 = __commonJS({ requestCredentials, requestCache, requestDuplex - } = require_constants13(); + } = require_constants15(); var { kEnumerableProperty, normalizedMethodRecordsBase, normalizedMethodRecords } = util; var { webidl } = require_webidl2(); var { URLSerializer } = require_data_url(); @@ -120062,7 +123941,7 @@ var require_fetch2 = __commonJS({ safeMethodsSet, requestBodyHeader, subresourceSet - } = require_constants13(); + } = require_constants15(); var EE = require("node:events"); var { Readable, pipeline, finished, isErrored, isReadable } = require("node:stream"); var { addAbortListener, bufferToLowerCasedHeaderName } = 
require_util11(); @@ -120852,7 +124731,7 @@ var require_fetch2 = __commonJS({ fetchParams.controller.abort(reason); } }; - const stream2 = new ReadableStream( + const stream = new ReadableStream( { start(controller) { fetchParams.controller.controller = controller; @@ -120862,7 +124741,7 @@ var require_fetch2 = __commonJS({ type: "bytes" } ); - response.body = { stream: stream2, source: null, length: null }; + response.body = { stream, source: null, length: null }; if (!fetchParams.controller.resume) { fetchParams.controller.on("terminated", onAborted); } @@ -120898,7 +124777,7 @@ var require_fetch2 = __commonJS({ if (buffer.byteLength) { fetchParams.controller.controller.enqueue(buffer); } - if (isErrored(stream2)) { + if (isErrored(stream)) { fetchParams.controller.terminate(); return; } @@ -120910,13 +124789,13 @@ var require_fetch2 = __commonJS({ function onAborted(reason) { if (isAborted(fetchParams)) { response.aborted = true; - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ); } } else { - if (isReadable(stream2)) { + if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError("terminated", { cause: isErrorLike(reason) ? reason : void 0 })); @@ -121101,7 +124980,7 @@ var require_util13 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/web/cache/cache.js -var require_cache6 = __commonJS({ +var require_cache7 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/cache/cache.js"(exports2, module2) { "use strict"; var assert = require("node:assert"); @@ -121305,8 +125184,8 @@ var require_cache6 = __commonJS({ const clonedResponse = cloneResponse(innerResponse); const bodyReadPromise = createDeferredPromise(); if (innerResponse.body != null) { - const stream2 = innerResponse.body.stream; - const reader = stream2.getReader(); + const stream = innerResponse.body.stream; + const reader = stream.getReader(); readAllBytes(reader, bodyReadPromise.resolve, bodyReadPromise.reject); } else { bodyReadPromise.resolve(void 0); @@ -121653,7 +125532,7 @@ var require_cache6 = __commonJS({ var require_cachestorage2 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/cache/cachestorage.js"(exports2, module2) { "use strict"; - var { Cache } = require_cache6(); + var { Cache } = require_cache7(); var { webidl } = require_webidl2(); var { kEnumerableProperty } = require_util11(); var { kConstruct } = require_symbols6(); @@ -121760,7 +125639,7 @@ var require_cachestorage2 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/web/cookies/constants.js -var require_constants14 = __commonJS({ +var require_constants16 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/cookies/constants.js"(exports2, module2) { "use strict"; var maxAttributeValueSize = 1024; @@ -121946,7 +125825,7 @@ var require_util14 = __commonJS({ var require_parse4 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/cookies/parse.js"(exports2, module2) { "use strict"; - var { maxNameValuePairSize, maxAttributeValueSize } = require_constants14(); + var { maxNameValuePairSize, maxAttributeValueSize } = require_constants16(); var { isCTLExcludingHtab } = require_util14(); var { collectASequenceOfCodePointsFast } = require_data_url(); var assert = require("node:assert"); @@ -122487,7 +126366,7 @@ var require_events2 = __commonJS({ }); // ../../node_modules/cheerio/node_modules/undici/lib/web/websocket/constants.js -var 
require_constants15 = __commonJS({ +var require_constants17 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/websocket/constants.js"(exports2, module2) { "use strict"; var uid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; @@ -122546,7 +126425,7 @@ var require_constants15 = __commonJS({ var require_util15 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/websocket/util.js"(exports2, module2) { "use strict"; - var { states, opcodes } = require_constants15(); + var { states, opcodes } = require_constants17(); var { isUtf8 } = require("node:buffer"); var { collectASequenceOfCodePointsFast, removeHTTPWhitespace } = require_data_url(); function isConnecting(readyState) { @@ -122718,7 +126597,7 @@ var require_util15 = __commonJS({ var require_frame2 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/websocket/frame.js"(exports2, module2) { "use strict"; - var { maxUnsigned16Bit, opcodes } = require_constants15(); + var { maxUnsigned16Bit, opcodes } = require_constants17(); var BUFFER_SIZE = 8 * 1024; var crypto2; var buffer = null; @@ -122830,7 +126709,7 @@ var require_frame2 = __commonJS({ var require_connection2 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/websocket/connection.js"(exports2, module2) { "use strict"; - var { uid, states, sentCloseFrameState, emptyBuffer, opcodes } = require_constants15(); + var { uid, states, sentCloseFrameState, emptyBuffer, opcodes } = require_constants17(); var { parseExtensions, isClosed, isClosing, isEstablished, validateCloseCodeAndReason } = require_util15(); var { makeRequest } = require_request4(); var { fetching } = require_fetch2(); @@ -123036,7 +126915,7 @@ var require_receiver2 = __commonJS({ "use strict"; var { Writable } = require("node:stream"); var assert = require("node:assert"); - var { parserStates, opcodes, states, emptyBuffer, sentCloseFrameState } = require_constants15(); + var { parserStates, opcodes, states, emptyBuffer, sentCloseFrameState } = require_constants17(); var { isValidStatusCode, isValidOpcode, @@ -123347,7 +127226,7 @@ var require_sender = __commonJS({ "../../node_modules/cheerio/node_modules/undici/lib/web/websocket/sender.js"(exports2, module2) { "use strict"; var { WebsocketFrameSend } = require_frame2(); - var { opcodes, sendHints } = require_constants15(); + var { opcodes, sendHints } = require_constants17(); var FixedQueue = require_fixed_queue2(); var SendQueue = class { /** @@ -123436,7 +127315,7 @@ var require_websocket2 = __commonJS({ var { webidl } = require_webidl2(); var { URLSerializer } = require_data_url(); var { environmentSettingsObject } = require_util12(); - var { staticPropertyDescriptors, states, sentCloseFrameState, sendHints, opcodes } = require_constants15(); + var { staticPropertyDescriptors, states, sentCloseFrameState, sendHints, opcodes } = require_constants17(); var { isConnecting, isEstablished, @@ -123976,7 +127855,7 @@ var require_websocketstream = __commonJS({ "use strict"; var { createDeferredPromise } = require_promise(); var { environmentSettingsObject } = require_util12(); - var { states, opcodes, sentCloseFrameState } = require_constants15(); + var { states, opcodes, sentCloseFrameState } = require_constants17(); var { webidl } = require_webidl2(); var { getURLRecord, isValidSubprotocol, isEstablished, utf8Decode } = require_util15(); var { establishWebSocketConnection, failWebsocketConnection, closeWebSocketConnection } = require_connection2(); @@ -124838,7 +128717,7 @@ var require_eventsource = 
__commonJS({ var require_undici2 = __commonJS({ "../../node_modules/cheerio/node_modules/undici/index.js"(exports2, module2) { "use strict"; - var Client = require_client2(); + var Client2 = require_client2(); var Dispatcher = require_dispatcher2(); var Pool = require_pool2(); var BalancedPool = require_balanced_pool2(); @@ -124863,7 +128742,7 @@ var require_undici2 = __commonJS({ var RedirectHandler = require_redirect_handler(); Object.assign(Dispatcher.prototype, api); module2.exports.Dispatcher = Dispatcher; - module2.exports.Client = Client; + module2.exports.Client = Client2; module2.exports.Pool = Pool; module2.exports.BalancedPool = BalancedPool; module2.exports.Agent = Agent; @@ -124880,7 +128759,7 @@ var require_undici2 = __commonJS({ retry: require_retry(), dump: require_dump(), dns: require_dns(), - cache: require_cache5() + cache: require_cache6() }; module2.exports.cacheStores = { MemoryCacheStore: require_memory_cache_store() @@ -125003,7 +128882,7 @@ var require_undici2 = __commonJS({ }); // ../../node_modules/whatwg-mimetype/lib/utils.js -var require_utils7 = __commonJS({ +var require_utils8 = __commonJS({ "../../node_modules/whatwg-mimetype/lib/utils.js"(exports2) { "use strict"; exports2.removeLeadingAndTrailingHTTPWhitespace = (string) => { @@ -125061,7 +128940,7 @@ var require_mime_type_parameters = __commonJS({ asciiLowercase, solelyContainsHTTPTokenCodePoints, soleyContainsHTTPQuotedStringTokenCodePoints - } = require_utils7(); + } = require_utils8(); module2.exports = class MIMETypeParameters { constructor(map2) { this._map = map2; @@ -125126,7 +129005,7 @@ var require_parser2 = __commonJS({ soleyContainsHTTPQuotedStringTokenCodePoints, asciiLowercase, collectAnHTTPQuotedString - } = require_utils7(); + } = require_utils8(); module2.exports = (input) => { input = removeLeadingAndTrailingHTTPWhitespace(input); let position = 0; @@ -125203,7 +129082,7 @@ var require_parser2 = __commonJS({ var require_serializer2 = __commonJS({ "../../node_modules/whatwg-mimetype/lib/serializer.js"(exports2, module2) { "use strict"; - var { solelyContainsHTTPTokenCodePoints } = require_utils7(); + var { solelyContainsHTTPTokenCodePoints } = require_utils8(); module2.exports = (mimeType) => { let serialization = `${mimeType.type}/${mimeType.subtype}`; if (mimeType.parameters.size === 0) { @@ -125234,7 +129113,7 @@ var require_mime_type = __commonJS({ var { asciiLowercase, solelyContainsHTTPTokenCodePoints - } = require_utils7(); + } = require_utils8(); module2.exports = class MIMEType { constructor(string) { string = String(string); @@ -126323,7 +130202,7 @@ var require_internal_hash_files = __commonJS({ var crypto2 = __importStar2(require("crypto")); var core8 = __importStar2(require_core()); var fs5 = __importStar2(require("fs")); - var stream2 = __importStar2(require("stream")); + var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); var path6 = __importStar2(require("path")); function hashFiles(globber, currentWorkspace, verbose = false) { @@ -126350,7 +130229,7 @@ var require_internal_hash_files = __commonJS({ continue; } const hash = crypto2.createHash("sha256"); - const pipeline = util.promisify(stream2.pipeline); + const pipeline = util.promisify(stream.pipeline); yield pipeline(fs5.createReadStream(file), hash); result2.write(hash.digest()); count++; @@ -126450,7 +130329,7 @@ var import_exec6 = __toESM(require_exec(), 1); // src/cache.ts var crypto = __toESM(require("node:crypto"), 1); var path5 = __toESM(require("node:path"), 1); -var cache = 
__toESM(require_cache3(), 1); +var cache = __toESM(require_cache4(), 1); var core4 = __toESM(require_core(), 1); var import_exec4 = __toESM(require_exec(), 1); var github3 = __toESM(require_github(), 1); @@ -126462,7 +130341,7 @@ var os = __toESM(require("node:os"), 1); var path = __toESM(require("node:path"), 1); var process2 = __toESM(require("node:process"), 1); var core = __toESM(require_core(), 1); -var yaml = __toESM(require_dist6(), 1); +var yaml = __toESM(require_dist5(), 1); var ARCHITECTURE = (() => { switch (process2.arch) { case "arm": { @@ -126513,7 +130392,7 @@ var PLATFORM = (() => { })(); var CYGWIN_MIRROR = "https://mirrors.kernel.org/sourceware/cygwin/"; var GITHUB_WORKSPACE = process2.env.GITHUB_WORKSPACE ?? process2.cwd(); -var CYGWIN_ROOT = path.join("D:", "cygwin"); +var CYGWIN_ROOT = path.join("C:", "cygwin"); var CYGWIN_ROOT_BIN = path.join(CYGWIN_ROOT, "bin"); var CYGWIN_LOCAL_PACKAGE_DIR = path.join(CYGWIN_ROOT, "packages"); var CYGWIN_BASH_ENV = path.join(CYGWIN_ROOT, "bash_env"); @@ -126523,13 +130402,13 @@ var DUNE_CACHE_ROOT = (() => { return path.join(xdgCacheHome, "dune"); } if (PLATFORM === "windows") { - return path.join("D:", "dune"); + return path.join("C:", "dune"); } return path.join(os.homedir(), ".cache", "dune"); })(); var OPAM_ROOT = (() => { if (PLATFORM === "windows") { - return path.join("D:", ".opam"); + return path.join("C:", ".opam"); } return path.join(os.homedir(), ".opam"); })(); @@ -130926,17 +134805,17 @@ __export(traversing_exports, { }); // ../../node_modules/cheerio-select/lib/esm/index.js -var import_css_what5 = __toESM(require_commonjs14(), 1); +var import_css_what5 = __toESM(require_commonjs16(), 1); // ../../node_modules/css-select/lib/esm/index.js var import_boolbase6 = __toESM(require_boolbase(), 1); // ../../node_modules/css-select/lib/esm/compile.js -var import_css_what4 = __toESM(require_commonjs14(), 1); +var import_css_what4 = __toESM(require_commonjs16(), 1); var import_boolbase5 = __toESM(require_boolbase(), 1); // ../../node_modules/css-select/lib/esm/sort.js -var import_css_what = __toESM(require_commonjs14(), 1); +var import_css_what = __toESM(require_commonjs16(), 1); var procedure = /* @__PURE__ */ new Map([ [import_css_what.SelectorType.Universal, 50], [import_css_what.SelectorType.Tag, 30], @@ -131176,7 +135055,7 @@ var attributeRules = { }; // ../../node_modules/css-select/lib/esm/pseudo-selectors/index.js -var import_css_what2 = __toESM(require_commonjs14(), 1); +var import_css_what2 = __toESM(require_commonjs16(), 1); // ../../node_modules/nth-check/lib/esm/parse.js var whitespace = /* @__PURE__ */ new Set([9, 10, 12, 13, 32]); @@ -131598,7 +135477,7 @@ function compilePseudoSelector(next2, selector, options, context3, compileToken2 } // ../../node_modules/css-select/lib/esm/general.js -var import_css_what3 = __toESM(require_commonjs14(), 1); +var import_css_what3 = __toESM(require_commonjs16(), 1); function getElementParent(node, adapter2) { const parent2 = adapter2.getParent(node); if (parent2 && adapter2.isTag(parent2)) { diff --git a/dist/post/index.cjs b/dist/post/index.cjs index 9c1855e3..0ffe3268 100644 --- a/dist/post/index.cjs +++ b/dist/post/index.cjs @@ -33280,1506 +33280,2435 @@ var require_commonjs10 = __commonJS({ } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/logger.js -var require_logger2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/log.js +var require_log5 = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.logger = void 0; var logger_1 = require_commonjs2(); - exports2.logger = (0, logger_1.createClientLogger)("core-lro"); + exports2.logger = (0, logger_1.createClientLogger)("storage-blob"); } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; + /** + * The main method to implement that manipulates a request/response. + */ + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } + }; + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js var require_constants9 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; - exports2.POLL_INTERVAL_IN_MS = 2e3; - exports2.terminalStates = ["succeeded", "canceled", "failed"]; + exports2.PathStylePorts = exports2.BlobDoesNotUseCustomerSpecifiedEncryption = exports2.BlobUsesCustomerSpecifiedEncryptionMsg = exports2.StorageBlobLoggingAllowedQueryParameters = exports2.StorageBlobLoggingAllowedHeaderNames = exports2.DevelopmentConnectionString = exports2.EncryptionAlgorithmAES25 = exports2.HTTP_VERSION_1_1 = exports2.HTTP_LINE_ENDING = exports2.BATCH_MAX_PAYLOAD_IN_BYTES = exports2.BATCH_MAX_REQUEST = exports2.SIZE_1_MB = exports2.ETagAny = exports2.ETagNone = exports2.HeaderConstants = exports2.HTTPURLConnection = exports2.URLConstants = exports2.StorageOAuthScopes = exports2.REQUEST_TIMEOUT = exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = exports2.BLOCK_BLOB_MAX_BLOCKS = exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = exports2.SERVICE_VERSION = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "12.28.0"; + exports2.SERVICE_VERSION = "2025-07-05"; + exports2.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; + exports2.BLOCK_BLOB_MAX_BLOCKS = 5e4; + exports2.DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; + 
exports2.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; + exports2.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; + exports2.REQUEST_TIMEOUT = 100 * 1e3; + exports2.StorageOAuthScopes = "https://storage.azure.com/.default"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" + } + }; + exports2.HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416 + }; + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.ETagNone = ""; + exports2.ETagAny = "*"; + exports2.SIZE_1_MB = 1 * 1024 * 1024; + exports2.BATCH_MAX_REQUEST = 256; + exports2.BATCH_MAX_PAYLOAD_IN_BYTES = 4 * exports2.SIZE_1_MB; + exports2.HTTP_LINE_ENDING = "\r\n"; + exports2.HTTP_VERSION_1_1 = "HTTP/1.1"; + exports2.EncryptionAlgorithmAES25 = "AES256"; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + 
"x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-copy-source-error-code", + "x-ms-copy-source-status-code", + "x-ms-if-tags", + "x-ms-source-if-tags" + ]; + exports2.StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot" + ]; + exports2.BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; + exports2.BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + exports2.PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js -var require_operation = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js +var require_utils_common = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.common.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; - var logger_js_1 = require_logger2(); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay; + exports2.padStart = padStart; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.toBlobTagsString = toBlobTagsString; + exports2.toBlobTags = 
toBlobTags; + exports2.toTags = toTags; + exports2.toQuerySerialization = toQuerySerialization; + exports2.parseObjectReplicationRecord = parseObjectReplicationRecord; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = httpAuthorizationToString; + exports2.BlobNameToString = BlobNameToString; + exports2.ConvertInternalResponseOfListBlobFlat = ConvertInternalResponseOfListBlobFlat; + exports2.ConvertInternalResponseOfListBlobHierarchy = ConvertInternalResponseOfListBlobHierarchy; + exports2.ExtractPageRangeInfoItems = ExtractPageRangeInfoItems; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); var constants_js_1 = require_constants9(); - function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } catch (e) { - throw new Error(`Unable to deserialize input state: ${serializedState}`); - } + function escapeURLPath(url) { + const urlParsed = new URL(url); + let path3 = urlParsed.pathname; + path3 = path3 || "/"; + path3 = escape(path3); + urlParsed.pathname = path3; + return urlParsed.toString(); } - exports2.deserializeState = deserializeState; - function setStateError(inputs) { - const { state, stateProxy, isOperationError } = inputs; - return (error2) => { - if (isOperationError(error2)) { - stateProxy.setError(state, error2); - stateProxy.setFailed(state); + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } } - throw error2; - }; - } - function appendReadableErrorMessage(currentMessage, innerMessage) { - let message = currentMessage; - if (message.slice(-1) !== ".") { - message = message + "."; } - return message + " " + innerMessage; + return proxyUri; } - function simplifyError(err) { - let message = err.message; - let code = err.code; - let curErr = err; - while (curErr.innererror) { - curErr = curErr.innererror; - code = curErr.code; - message = appendReadableErrorMessage(message, curErr.message); + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } } - return { - code, - message - }; + return ""; } - function processOperationStatus(result2) { - const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result2; - switch (status) { - case "succeeded": { - stateProxy.setSucceeded(state); - break; - } - case "failed": { - const err = getError === null || getError === void 0 ? void 0 : getError(response); - let postfix = ""; - if (err) { - const { code, message } = simplifyError(err); - postfix = `. ${code}. ${message}`; + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = constants_js_1.DevelopmentConnectionString; + } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); } - const errStr = `The long-running operation has failed${postfix}`; - stateProxy.setError(state, new Error(errStr)); - stateProxy.setFailed(state); - logger_js_1.logger.warning(errStr); - break; + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - case "canceled": { - stateProxy.setCanceled(state); - break; + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); } - } - if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { - stateProxy.setResult(state, buildResult({ - response, - state, - processResult - })); + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri + }; + } else { + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } - function buildResult(inputs) { - const { processResult, response, state } = inputs; - return processResult ? processResult(response, state) : response; + function escape(text) { + return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); } - async function initOperation(inputs) { - const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; - const { operationLocation, resourceLocation, metadata, response } = await init(); - if (operationLocation) - withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); - const config = { - metadata, - operationLocation, - resourceLocation - }; - logger_js_1.logger.verbose(`LRO: Operation description:`, config); - const state = stateProxy.initState(config); - const status = getOperationStatus({ response, state, operationLocation }); - processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); - return state; + function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path3 = urlParsed.pathname; + path3 = path3 ? path3.endsWith("/") ? `${path3}${name}` : `${path3}/${name}` : name; + urlParsed.pathname = path3; + return urlParsed.toString(); } - exports2.initOperation = initOperation; - async function pollOperationHelper(inputs) { - const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; - const response = await poll(operationLocation, options).catch(setStateError({ - state, - stateProxy, - isOperationError - })); - const status = getOperationStatus(response, state); - logger_js_1.logger.verbose(`LRO: Status: - Polling from: ${state.config.operationLocation} - Operation status: ${status} - Polling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); - if (status === "succeeded") { - const resourceLocation = getResourceLocation(response, state); - if (resourceLocation !== void 0) { - return { - response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), - status - }; + function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : void 0; + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } } } - return { response, status }; - } - async function pollOperation(inputs) { - const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; - const { operationLocation } = state.config; - if (operationLocation !== void 0) { - const { response, status } = await pollOperationHelper({ - poll, - getOperationStatus, - state, - stateProxy, - operationLocation, - getResourceLocation, - isOperationError, - options - }); - processOperationStatus({ - status, - response, - state, - stateProxy, - isDone, - processResult, - getError, - setErrorAsResult - }); - if (!constants_js_1.terminalStates.includes(status)) { - const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); - if (intervalInMs) - setDelay(intervalInMs); - const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); - if (location !== void 0) { - const isUpdated = operationLocation !== location; - state.config.operationLocation = location; - withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); - } else - withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); - } - updateState === null || updateState === void 0 ? void 0 : updateState(state, response); + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); } - exports2.pollOperation = pollOperation; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js -var require_operation2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; - var operation_js_1 = require_operation(); - var logger_js_1 = require_logger2(); - function getOperationLocationPollingUrl(inputs) { - const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; + function getURLParameter(url, name) { + const urlParsed = new URL(url); + return urlParsed.searchParams.get(name) ?? void 0; } - function getLocationHeader(rawResponse) { - return rawResponse.headers["location"]; + function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); } - function getOperationLocationHeader(rawResponse) { - return rawResponse.headers["operation-location"]; + function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } catch (e) { + return void 0; + } } - function getAzureAsyncOperationHeader(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; + function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } catch (e) { + return void 0; + } } - function findResourceLocation(inputs) { - var _a; - const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; - switch (requestMethod) { - case "PUT": { - return requestPath; - } - case "DELETE": { - return void 0; - } - case "PATCH": { - return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; - } - default: { - return getDefault(); - } + function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); } - function getDefault() { - switch (resourceLocationConfig) { - case "azure-async-operation": { - return void 0; - } - case "original-uri": { - return requestPath; - } - case "location": - default: { - return location; - } - } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? 
queryString : `?${queryString}`; } + return `${pathString}${queryString}`; } - function inferLroMode(inputs) { - const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; - const operationLocation = getOperationLocationHeader(rawResponse); - const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); - const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); - const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); - if (pollingUrl !== void 0) { - return { - mode: "OperationLocation", - operationLocation: pollingUrl, - resourceLocation: findResourceLocation({ - requestMethod: normalizedRequestMethod, - location, - requestPath, - resourceLocationConfig - }) - }; - } else if (location !== void 0) { - return { - mode: "ResourceLocation", - operationLocation: location + function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; + } + function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); + } + function truncatedISO8061Date(date, withMilliseconds = true) { + const dateString = date.toISOString(); + return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; + } + function base64encode(content) { + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? 
atob(encodedString) : Buffer.from(encodedString, "base64").toString(); + } + function generateBlockID(blockIDPrefix, blockIndex) { + const maxSourceStringLength = 48; + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); + } + async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + let timeout; + const abortHandler = () => { + if (timeout !== void 0) { + clearTimeout(timeout); + } + reject(abortError); }; - } else if (normalizedRequestMethod === "PUT" && requestPath) { - return { - mode: "Body", - operationLocation: requestPath + const resolveHandler = () => { + if (aborter !== void 0) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== void 0) { + aborter.addEventListener("abort", abortHandler); + } + }); + } + function padStart(currentString, targetLength, padString = " ") { + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; } else { - return void 0; + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; } } - exports2.inferLroMode = inferLroMode; - function transformStatus(inputs) { - const { status, statusCode } = inputs; - if (typeof status !== "string" && status !== void 0) { - throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); + function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); } - switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { - case void 0: - return toOperationStatus(statusCode); - case "succeeded": - return "succeeded"; - case "failed": - return "failed"; - case "running": - case "accepted": - case "started": - case "canceling": - case "cancelling": - return "running"; - case "canceled": - case "cancelled": - return "canceled"; - default: { - logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); - return status; + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); } } + return headers; } - function getStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? 
_a : {}; - return transformStatus({ status, statusCode: rawResponse.statusCode }); + function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } - function getProvisioningState(rawResponse) { - var _a, _b; - const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return transformStatus({ status, statusCode: rawResponse.statusCode }); + function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + accountName = parsedUrl.hostname.split(".")[0]; + } else if (isIpEndpointStyle(parsedUrl)) { + accountName = parsedUrl.pathname.split("/")[1]; + } else { + accountName = ""; + } + return accountName; + } catch (error2) { + throw new Error("Unable to extract accountName with provided information."); + } } - function toOperationStatus(statusCode) { - if (statusCode === 202) { - return "running"; - } else if (statusCode < 300) { - return "succeeded"; - } else { - return "failed"; + function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); + } + function toBlobTagsString(tags) { + if (tags === void 0) { + return void 0; } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); } - function parseRetryAfter({ rawResponse }) { - const retryAfter = rawResponse.headers["retry-after"]; - if (retryAfter !== void 0) { - const retryAfterInSeconds = parseInt(retryAfter); - return isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; + function toBlobTags(tags) { + if (tags === void 0) { + return void 0; } - return void 0; + const res = { + blobTagSet: [] + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value + }); + } + } + return res; } - exports2.parseRetryAfter = parseRetryAfter; - function getErrorFromResponse(response) { - const error2 = accessBodyProperty(response, "error"); - if (!error2) { - logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); - return; + function toTags(tags) { + if (tags === void 0) { + return void 0; } - if (!error2.code || !error2.message) { - logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); - return; + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; } - return error2; + return res; } - exports2.getErrorFromResponse = getErrorFromResponse; - function calculatePollingIntervalFromDate(retryAfterDate) { - const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); - const retryAfterTime = retryAfterDate.getTime(); - if (timeNow < retryAfterTime) { - return retryAfterTime - timeNow; + function toQuerySerialization(textConfiguration) { + if (textConfiguration === void 0) { + return void 0; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false + } + } + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator + } + } + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema + } + } + }; + case "parquet": + return { + format: { + type: "parquet" + } + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); } - return void 0; } - function getStatusFromInitialResponse(inputs) { - const { response, state, operationLocation } = inputs; - function helper() { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; - switch (mode) { - case void 0: - return toOperationStatus(response.rawResponse.statusCode); - case "Body": - return getOperationStatus(response, state); - default: - return "running"; + function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return void 0; + } + if ("policy-id" in objectReplicationRecord) { + return void 0; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key] + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } else { + orProperties.push({ + policyId: ids[0], + rules: [rule] + }); } } - const status = helper(); - return status === "running" && operationLocation === void 0 ? "succeeded" : status; + return orProperties; } - exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; - async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; - return (0, operation_js_1.initOperation)({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = inferLroMode({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - stateProxy, - processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: getStatusFromInitialResponse, - setErrorAsResult - }); + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; } - exports2.initHttpOperation = initHttpOperation; - function getOperationLocation({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getOperationLocationPollingUrl({ - operationLocation: getOperationLocationHeader(rawResponse), - azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) - }); - } - case "ResourceLocation": { - return getLocationHeader(rawResponse); - } - case "Body": - default: { - return void 0; - } + function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; + } + function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } else { + return name.content; } } - exports2.getOperationLocation = getOperationLocation; - function getOperationStatus({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getStatus(rawResponse); + function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return { + ...internalResponse, + segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } - case "ResourceLocation": { - return toOperationStatus(rawResponse.statusCode); + }; + } + function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + return { + ...internalResponse, + segment: { + blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: BlobNameToString(blobPrefixInternal.name) + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = { + ...blobItemInteral, + name: BlobNameToString(blobItemInteral.name) + }; + return blobItem; + }) } - case "Body": { - return getProvisioningState(rawResponse); + }; + } + function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + ++pageRangeIndex; + } else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; + ++clearRangeIndex; } - default: - throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } - } - exports2.getOperationStatus = getOperationStatus; - function accessBodyProperty({ flatResponse, rawResponse }, prop) { - var _a, _b; - return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; - } - function getResourceLocation(res, state) { - const loc = accessBodyProperty(res, "resourceLocation"); - if (loc && typeof loc === "string") { - state.config.resourceLocation = loc; + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; } - return state.config.resourceLocation; } - exports2.getResourceLocation = getResourceLocation; - function isOperationError(e) { - return e.name === "RestError"; + function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); } - exports2.isOperationError = isOperationError; - async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; - return (0, operation_js_1.pollOperation)({ - state, - stateProxy, - setDelay, - processResult: processResult ? 
({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, - getError: getErrorFromResponse, - updateState, - getPollingInterval: parseRetryAfter, - getOperationLocation, - getOperationStatus, - isOperationError, - getResourceLocation, - options, - /** - * The expansion here is intentional because `lro` could be an object that - * references an inner this, so we need to preserve a reference to it. - */ - poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), - setErrorAsResult - }); + function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); } - exports2.pollHttpOperation = pollHttpOperation; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js -var require_poller = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildCreatePoller = void 0; - var operation_js_1 = require_operation(); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs3(); + var RequestPolicy_js_1 = require_RequestPolicy(); var constants_js_1 = require_constants9(); - var core_util_1 = require_commonjs4(); - var createStateProxy = () => ({ + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * The state at this point is created to be of type OperationState. - * It will be updated later to be of type TState when the - * customer-provided callback, `updateState`, is called during polling. + * RetryOptions. 
*/ - initState: (config) => ({ status: "running", config }), - setCanceled: (state) => state.status = "canceled", - setError: (state, error2) => state.error = error2, - setResult: (state, result2) => state.result = result2, - setRunning: (state) => state.status = "running", - setSucceeded: (state) => state.status = "succeeded", - setFailed: (state) => state.status = "failed", - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => state.status === "canceled", - isFailed: (state) => state.status === "failed", - isRunning: (state) => state.status === "running", - isSucceeded: (state) => state.status === "succeeded" - }); - function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; - return async ({ init, poll }, options) => { - const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; - const stateProxy = createStateProxy(); - const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { - let called = false; - return (operationLocation, isUpdated) => { - if (isUpdated) - withOperationLocationCallback(operationLocation); - else if (!called) - withOperationLocationCallback(operationLocation); - called = true; - }; - })() : void 0; - const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ - init, - stateProxy, - processResult, - getOperationStatus: getStatusFromInitialResponse, - withOperationLocation, - setErrorAsResult: !resolveOnUnsuccessful - }); - let resultPromise; - const abortController = new AbortController(); - const handlers = /* @__PURE__ */ new Map(); - const handleProgressEvents = async () => handlers.forEach((h) => h(state)); - const cancelErrMsg = "Operation was canceled"; - let currentPollIntervalInMs = intervalInMs; - const poller = { - getOperationState: () => state, - getResult: () => state.result, - isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), - isStopped: () => resultPromise === void 0, - stopPolling: () => { - abortController.abort(); - }, - toString: () => JSON.stringify({ - state - }), - onProgress: (callback) => { - const s = Symbol(); - handlers.set(s, callback); - return () => handlers.delete(s); - }, - pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { - const { abortSignal: inputAbortSignal } = pollOptions || {}; - function abortListener() { - abortController.abort(); - } - const abortSignal = abortController.signal; - if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { - abortController.abort(); - } else if (!abortSignal.aborted) { - inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); - } - try { - if (!poller.isDone()) { - await poller.poll({ abortSignal }); - while (!poller.isDone()) { - await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); - await poller.poll({ abortSignal }); - } - } - } finally { - inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); - } - if (resolveOnUnsuccessful) { - return poller.getResult(); - } else { - switch (state.status) { - case "succeeded": - return poller.getResult(); - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - case "notStarted": - case "running": - throw new Error(`Polling completed without succeeding or failing`); - } - } - })().finally(() => { - resultPromise = void 0; - }), - async poll(pollOptions) { - if (resolveOnUnsuccessful) { - if (poller.isDone()) - return; - } else { - switch (state.status) { - case "succeeded": - return; - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - await (0, operation_js_1.pollOperation)({ - poll, - state, - stateProxy, - getOperationLocation, - isOperationError, - withOperationLocation, - getPollingInterval, - getOperationStatus: getStatusFromPollResponse, - getResourceLocation, - processResult, - getError, - updateState, - options: pollOptions, - setDelay: (pollIntervalInMs) => { - currentPollIntervalInMs = pollIntervalInMs; - }, - setErrorAsResult: !resolveOnUnsuccessful - }); - await handleProgressEvents(); - if (!resolveOnUnsuccessful) { - switch (state.status) { - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } - } - }; - return poller; - }; - } - exports2.buildCreatePoller = buildCreatePoller; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js -var require_poller2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var operation_js_1 = require_operation2(); - var poller_js_1 = require_poller(); - async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; - return (0, poller_js_1.buildCreatePoller)({ - getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, - getStatusFromPollResponse: operation_js_1.getOperationStatus, - isOperationError: operation_js_1.isOperationError, - getOperationLocation: operation_js_1.getOperationLocation, - getResourceLocation: operation_js_1.getResourceLocation, - getPollingInterval: operation_js_1.parseRetryAfter, - getError: operation_js_1.getErrorFromResponse, - resolveOnUnsuccessful - })({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = (0, operation_js_1.inferLroMode)({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - poll: lro.sendPollRequest - }, { - intervalInMs, - withOperationLocation, - restoreFrom, - updateState, - processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse - }); - } - exports2.createHttpPoller = createHttpPoller; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js -var require_operation3 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.GenericPollOperation = void 0; - var operation_js_1 = require_operation2(); - var logger_js_1 = require_logger2(); - var createStateProxy = () => ({ - initState: (config) => ({ config, isStarted: true }), - setCanceled: (state) => state.isCancelled = true, - setError: (state, error2) => state.error = error2, - setResult: (state, result2) => state.result = result2, - setRunning: (state) => state.isStarted = true, - setSucceeded: (state) => state.isCompleted = true, - setFailed: () => { - }, - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => !!state.isCancelled, - isFailed: (state) => !!state.error, - isRunning: (state) => !!state.isStarted, - isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) - }); - var GenericPollOperation = class { - constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.setErrorAsResult = setErrorAsResult; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; - } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; - } - async update(options) { - var _a; - const stateProxy = createStateProxy(); - if (!this.state.isStarted) { - this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult - })); - } - const updateState = this.updateState; - const isDone = this.isDone; - if (!this.state.isCompleted && this.state.error === void 0) { - await (0, operation_js_1.pollHttpOperation)({ - lro: this.lro, - state: this.state, - stateProxy, - processResult: this.processResult, - updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, - isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, - options, - setDelay: (intervalInMs) => { - this.pollerConfig.intervalInMs = intervalInMs; - }, - setErrorAsResult: this.setErrorAsResult - }); - } - (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); - return this; - } - async cancel() { - logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); - return this; - } + retryOptions; /** - * Serializes the Poller operation. + * Creates an instance of RetryPolicy. 
+ * + * @param nextPolicy - + * @param options - + * @param retryOptions - */ - toString() { - return JSON.stringify({ - state: this.state - }); - } - }; - exports2.GenericPollOperation = GenericPollOperation; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js -var require_poller3 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; - var PollerStoppedError = class _PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, _PollerStoppedError.prototype); - } - }; - exports2.PollerStoppedError = PollerStoppedError; - var PollerCancelledError = class _PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, _PollerCancelledError.prototype); + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + }; } - }; - exports2.PollerCancelledError = PollerCancelledError; - var Poller = class { /** - * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. - * - * When writing an implementation of a Poller, this implementation needs to deal with the initialization - * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's - * operation has already been defined, at least its basic properties. The code below shows how to approach - * the definition of the constructor of a new custom poller. - * - * ```ts - * export class MyPoller extends Poller { - * constructor({ - * // Anything you might need outside of the basics - * }) { - * let state: MyOperationState = { - * privateProperty: private, - * publicProperty: public, - * }; - * - * const operation = { - * state, - * update, - * cancel, - * toString - * } - * - * // Sending the operation to the parent's constructor. - * super(operation); - * - * // You can assign more local properties here. - * } - * } - * ``` - * - * Inside of this constructor, a new promise is created. This will be used to - * tell the user when the poller finishes (see `pollUntilDone()`). The promise's - * resolve and reject methods are also used internally to control when to resolve - * or reject anyone waiting for the poller to finish. 
- * - * The constructor of a custom implementation of a poller is where any serialized version of - * a previous poller's operation should be deserialized into the operation sent to the - * base constructor. For example: - * - * ```ts - * export class MyPoller extends Poller { - * constructor( - * baseOperation: string | undefined - * ) { - * let state: MyOperationState = {}; - * if (baseOperation) { - * state = { - * ...JSON.parse(baseOperation).state, - * ...state - * }; - * } - * const operation = { - * state, - * // ... - * } - * super(operation); - * } - * } - * ``` + * Sends request. * - * @param operation - Must contain the basic properties of `PollOperation`. + * @param request - */ - constructor(operation) { - this.resolveOnUnsuccessful = false; - this.stopped = true; - this.pollProgressCallbacks = []; - this.operation = operation; - this.promise = new Promise((resolve, reject) => { - this.resolve = resolve; - this.reject = reject; - }); - this.promise.catch(() => { - }); + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); } /** - * Starts a loop that will break only if the poller is done - * or if the poller is stopped. + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. */ - async startPolling(pollOptions = {}) { - if (this.stopped) { - this.stopped = false; + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); } - while (!this.isStopped() && !this.isDone()) { - await this.poll(pollOptions); - await this.delay(); + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + } + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); } /** - * pollOnce does one polling, by calling to the update method of the underlying - * poll operation to make any relevant change effective. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. 
+ * Decide whether to retry according to last HTTP response and retry counters. * - * @param options - Optional properties passed to the operation's update method. + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - */ - async pollOnce(options = {}) { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; } - this.processUpdatedState(); + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; } /** - * fireProgress calls the functions passed in via onProgress the method of the poller. - * - * It loops over all of the callbacks received from onProgress, and executes them, sending them - * the current operation state. + * Delay a calculated time between retries. * - * @param state - The current operation state. 
+ * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - */ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { - callback(state); + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } + }; + exports2.StorageRetryPolicy = StorageRetryPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy(); + Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; /** - * Invokes the underlying operation's cancel method. + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); + constructor(retryOptions) { + this.retryOptions = retryOptions; } /** - * Returns a promise that will resolve once a single polling request finishes. - * It does this by calling the update method of the Poller's operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Creates a StorageRetryPolicy object. * - * @param options - Optional properties passed to the operation's update method. + * @param nextPolicy - + * @param options - */ - poll(options = {}) { - if (!this.pollOncePromise) { - this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = void 0; - }; - this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); - } - return this.pollOncePromise; - } - processUpdatedState() { - if (this.operation.state.error) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - this.reject(this.operation.state.error); - throw this.operation.state.error; - } - } - if (this.operation.state.isCancelled) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - const error2 = new PollerCancelledError("Operation was canceled"); - this.reject(error2); - throw error2; - } - } - if (this.isDone() && this.resolve) { - this.resolve(this.getResult()); - } - } - /** - * Returns a promise that will resolve once the underlying operation is completed. 
- */ - async pollUntilDone(pollOptions = {}) { - if (this.stopped) { - this.startPolling(pollOptions).catch(this.reject); - } - this.processUpdatedState(); - return this.promise; - } - /** - * Invokes the provided callback after each polling is completed, - * sending the current state of the poller's operation. - * - * It returns a method that can be used to stop receiving updates on the given callback function. - */ - onProgress(callback) { - this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); - }; - } - /** - * Returns true if the poller has finished polling. - */ - isDone() { - const state = this.operation.state; - return Boolean(state.isCompleted || state.isCancelled || state.error); - } - /** - * Stops the poller from continuing to poll. - */ - stopPolling() { - if (!this.stopped) { - this.stopped = true; - if (this.reject) { - this.reject(new PollerStoppedError("This poller is already stopped")); - } - } - } - /** - * Returns true if the poller is stopped. - */ - isStopped() { - return this.stopped; - } - /** - * Attempts to cancel the underlying operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. - * - * If it's called again before it finishes, it will throw an error. - * - * @param options - Optional properties passed to the operation's update method. - */ - cancelOperation(options = {}) { - if (!this.cancelPromise) { - this.cancelPromise = this.cancelOnce(options); - } else if (options.abortSignal) { - throw new Error("A cancel request is currently pending"); - } - return this.cancelPromise; - } - /** - * Returns the state of the operation. - * - * Even though TState will be the same type inside any of the methods of any extension of the Poller class, - * implementations of the pollers can customize what's shared with the public by writing their own - * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller - * and a public type representing a safe to share subset of the properties of the internal state. - * Their definition of getOperationState can then return their public type. - * - * Example: - * - * ```ts - * // Let's say we have our poller's operation state defined as: - * interface MyOperationState extends PollOperationState { - * privateProperty?: string; - * publicProperty?: string; - * } - * - * // To allow us to have a true separation of public and private state, we have to define another interface: - * interface PublicState extends PollOperationState { - * publicProperty?: string; - * } - * - * // Then, we define our Poller as follows: - * export class MyPoller extends Poller { - * // ... More content is needed here ... - * - * public getOperationState(): PublicState { - * const state: PublicState = this.operation.state; - * return { - * // Properties from PollOperationState - * isStarted: state.isStarted, - * isCompleted: state.isCompleted, - * isCancelled: state.isCancelled, - * error: state.error, - * result: state.result, - * - * // The only other property needed by PublicState. - * publicProperty: state.publicProperty - * } - * } - * } - * ``` - * - * You can see this in the tests of this repository, go to the file: - * `../test/utils/testPoller.ts` - * and look for the getOperationState implementation. 
- */ - getOperationState() { - return this.operation.state; - } - /** - * Returns the result value of the operation, - * regardless of the state of the poller. - * It can return undefined or an incomplete form of the final TResult value - * depending on the implementation. - */ - getResult() { - const state = this.operation.state; - return state.result; - } - /** - * Returns a serialized version of the poller's operation - * by invoking the operation's toString method. - */ - toString() { - return this.operation.toString(); + create(nextPolicy, options) { + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); } }; - exports2.Poller = Poller; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; } }); -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js -var require_lroEngine = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/CredentialPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var operation_js_1 = require_operation3(); - var constants_js_1 = require_constants9(); - var poller_js_1 = require_poller3(); - var operation_js_2 = require_operation(); - var LroEngine = class extends poller_js_1.Poller { - constructor(lro, options) { - const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; - const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; - const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); - super(operation); - this.resolveOnUnsuccessful = resolveOnUnsuccessful; - this.config = { intervalInMs }; - operation.setPollerConfig(this.config); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } /** - * The method used by the poller to wait before attempting to update its operation. + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. 
+ * + * @param request - */ - delay() { - return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + signRequest(request) { + return request; } }; - exports2.LroEngine = LroEngine; - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js -var require_lroEngine2 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var lroEngine_js_1 = require_lroEngine(); - Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { - return lroEngine_js_1.LroEngine; - } }); - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js -var require_pollOperation = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// ../../node_modules/@azure/core-lro/dist/commonjs/index.js -var require_commonjs11 = __commonJS({ - "../../node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var poller_js_1 = require_poller2(); - Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { - return poller_js_1.createHttpPoller; - } }); - tslib_1.__exportStar(require_lroEngine2(), exports2); - tslib_1.__exportStar(require_poller3(), exports2); - tslib_1.__exportStar(require_pollOperation(), exports2); + exports2.CredentialPolicy = CredentialPolicy; } }); -// ../../node_modules/@azure/storage-blob/dist/index.js -var require_dist4 = __commonJS({ - "../../node_modules/@azure/storage-blob/dist/index.js"(exports2) { +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var coreRestPipeline = require_commonjs6(); - var tslib = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var coreAuth = require_commonjs7(); - var coreUtil = require_commonjs4(); - var coreHttpCompat = require_commonjs9(); - var coreClient = require_commonjs8(); - var coreXml = require_commonjs10(); - var logger$1 = require_commonjs2(); - var abortController = require_commonjs3(); - var crypto2 = require("crypto"); - var coreTracing = require_commonjs5(); - var stream = require("stream"); - var coreLro = require_commonjs11(); - var events = require("events"); - var fs = require("fs"); - var util = require("util"); - var buffer = require("buffer"); - function _interopNamespaceDefault(e) { - var n = /* @__PURE__ */ Object.create(null); - if (e) { - Object.keys(e).forEach(function(k) { - if (k !== "default") { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? 
d : { - enumerable: true, - get: function() { - return e[k]; - } - }); - } - }); - } - n.default = e; - return Object.freeze(n); - } - var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); - var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs); - var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); - var logger = logger$1.createClientLogger("storage-blob"); - var BaseRequestPolicy = class { - /** - * The main method to implement that manipulates a request/response. - */ - constructor(_nextPolicy, _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); - } - }; - var SDK_VERSION = "12.27.0"; - var SERVICE_VERSION = "2025-05-05"; - var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; - var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; - var BLOCK_BLOB_MAX_BLOCKS = 5e4; - var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; - var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; - var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; - var REQUEST_TIMEOUT = 100 * 1e3; - var StorageOAuthScopes = "https://storage.azure.com/.default"; - var URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout" - } - }; - var HTTPURLConnection = { - HTTP_ACCEPTED: 202 - }; - var HeaderConstants = { - AUTHORIZATION: "Authorization", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" - }; - var ETagNone = ""; - var ETagAny = "*"; - var SIZE_1_MB = 1 * 1024 * 1024; - var BATCH_MAX_REQUEST = 256; - var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; - var HTTP_LINE_ENDING = "\r\n"; - var HTTP_VERSION_1_1 = "HTTP/1.1"; - var EncryptionAlgorithmAES25 = "AES256"; - var DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; - var StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - 
"x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - "x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-copy-source-error-code", - "x-ms-copy-source-status-code", - "x-ms-if-tags", - "x-ms-source-if-tags" - ]; - var StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot" - ]; - var BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; - var BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; - var PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104" - ]; - function escapeURLPath(url2) { - const urlParsed = new URL(url2); - let path3 = urlParsed.pathname; - path3 = path3 || "/"; - path3 = escape(path3); - urlParsed.pathname = path3; - return urlParsed.toString(); - } - function getProxyUriFromDevConnString(connectionString) { - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if 
(element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } - } - return proxyUri; - } - function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } - } - return ""; - } - function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; - } - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); - } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); - } else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); - } - return { - kind: "AccountConnString", - url: blobEndpoint, + exports2.compareHeader = compareHeader; + var table_lv0 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1820, + 0, + 1823, + 1825, + 1827, + 1829, + 0, + 0, + 0, + 1837, + 2051, + 0, + 0, + 1843, + 0, + 3331, + 3354, + 3356, + 3358, + 3360, + 3362, + 3364, + 3366, + 3368, + 3370, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 0, + 0, + 1859, + 1860, + 1864, + 3586, + 3593, + 3594, + 3610, + 3617, + 3619, + 3621, + 3628, + 3634, + 3637, + 3638, + 3656, + 3665, + 3696, + 3708, + 3710, + 3721, + 3722, + 3729, + 3737, + 3743, + 3746, + 3748, + 3750, + 3751, + 3753, + 0, + 1868, + 0, + 1872, + 0 + ]); + var table_lv2 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 
18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 18, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + var table_lv4 = new Uint32Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 32786, + 0, + 0, + 0, + 0, + 0, + 33298, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ]); + function compareHeader(lhs, rhs) { + if (isLessThan(lhs, rhs)) + return -1; + return 1; + } + function isLessThan(lhs, rhs) { + const tables = [table_lv0, table_lv2, table_lv4]; + let curr_level = 0; + let i = 0; + let j = 0; + while (curr_level < tables.length) { + if (curr_level === tables.length - 1 && i !== j) { + return i > j; + } + const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; + const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 1; + if (weight1 === 1 && weight2 === 1) { + i = 0; + j = 0; + ++curr_level; + } else if (weight1 === weight2) { + ++i; + ++j; + } else if (weight1 === 0) { + ++i; + } else if (weight2 === 0) { + ++j; + } else { + return weight1 < weight2; + } + } + return false; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://learn.microsoft.com/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path3 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path3}`; + const queries = (0, utils_common_js_1.getURLQueries)(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + }; + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js +var require_Credential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } + }; + exports2.Credential = Credential; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { + /** + * Azure Storage account name; readonly. + */ + accountName; + /** + * Azure Storage account key; readonly. + */ + accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } + }; + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + }; + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy(); + var Credential_js_1 = require_Credential(); + var AnonymousCredential = class extends Credential_js_1.Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); + } + }; + exports2.AnonymousCredential = AnonymousCredential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js +var require_BuffersStream = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/BuffersStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BuffersStream = void 0; + var node_stream_1 = require("node:stream"); + var BuffersStream = class extends node_stream_1.Readable { + buffers; + byteLength; + /** + * The offset of data to be read in the current buffer. + */ + byteOffsetInCurrentBuffer; + /** + * The index of buffer to be read in the array of buffers. + */ + bufferIndex; + /** + * The total length of data already read. + */ + pushedBytesLength; + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } else { + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } + }; + exports2.BuffersStream = BuffersStream; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js +var require_PooledBuffer = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/PooledBuffer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PooledBuffer = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var BuffersStream_js_1 = require_BuffersStream(); + var node_buffer_1 = tslib_1.__importDefault(require("node:buffer")); + var maxBufferLength = node_buffer_1.default.constants.MAX_LENGTH; + var PooledBuffer = class { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + buffers = []; + /** + * The total size of internal buffers. + */ + capacity; + /** + * The total size of data contained in internal buffers. + */ + _size; + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + constructor(capacity, buffers, totalLength) { + this.capacity = capacity; + this._size = 0; + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream_js_1.BuffersStream(this.buffers, this.size); + } + }; + exports2.PooledBuffer = PooledBuffer; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js +var require_BufferScheduler = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/BufferScheduler.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BufferScheduler = void 0; + var events_1 = require("events"); + var PooledBuffer_js_1 = require_PooledBuffer(); + var BufferScheduler = class { + /** + * Size of buffers in incoming and outgoing queues. This class will try to align + * data read from Readable stream into buffer chunks with bufferSize defined. + */ + bufferSize; + /** + * How many buffers can be created or maintained. + */ + maxBuffers; + /** + * A Node.js Readable stream. + */ + readable; + /** + * OutgoingHandler is an async function triggered by BufferScheduler when there + * are available buffers in outgoing array. + */ + outgoingHandler; + /** + * An internal event emitter. + */ + emitter = new events_1.EventEmitter(); + /** + * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers) + */ + concurrency; + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + offset = 0; + /** + * An internal marker to track whether stream is end. + */ + isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + isError = false; + /** + * How many handlers are executing. + */ + executingOutgoingHandlers = 0; + /** + * Encoding of the input Readable stream which has string data type instead of Buffer. + */ + encoding; + /** + * How many buffers have been allocated. + */ + numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + outgoing = []; + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); + } else if (this.unresolvedLength >= this.bufferSize) { + return; + } else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. 
+ * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer_js_1.PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } else { + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } + }; + exports2.BufferScheduler = BufferScheduler; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/cache.js +var require_cache2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/cache.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCachedDefaultHttpClient = getCachedDefaultHttpClient; + var core_rest_pipeline_1 = require_commonjs6(); + var _defaultHttpClient; + function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return _defaultHttpClient; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js +var require_RequestPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/RequestPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = void 0; + var BaseRequestPolicy = class { + _nextPolicy; + _options; + /** + * The main method to implement that manipulates a request/response. 
+ */ + constructor(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } + }; + exports2.BaseRequestPolicy = BaseRequestPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/constants.js +var require_constants10 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PathStylePorts = exports2.DevelopmentConnectionString = exports2.HeaderConstants = exports2.URLConstants = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "1.0.0"; + exports2.URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" + } + }; + exports2.HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", + X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code" + }; + exports2.DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + exports2.PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js +var require_utils_common2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.escapeURLPath = escapeURLPath; + exports2.getValueInConnString = getValueInConnString; + exports2.extractConnectionStringParts = extractConnectionStringParts; + exports2.appendToURLPath = appendToURLPath; + exports2.setURLParameter = setURLParameter; + exports2.getURLParameter = getURLParameter; + exports2.setURLHost = setURLHost; + exports2.getURLPath = 
getURLPath; + exports2.getURLScheme = getURLScheme; + exports2.getURLPathAndQuery = getURLPathAndQuery; + exports2.getURLQueries = getURLQueries; + exports2.appendToURLQuery = appendToURLQuery; + exports2.truncatedISO8061Date = truncatedISO8061Date; + exports2.base64encode = base64encode; + exports2.base64decode = base64decode; + exports2.generateBlockID = generateBlockID; + exports2.delay = delay; + exports2.padStart = padStart; + exports2.sanitizeURL = sanitizeURL; + exports2.sanitizeHeaders = sanitizeHeaders; + exports2.iEqual = iEqual; + exports2.getAccountNameFromUrl = getAccountNameFromUrl; + exports2.isIpEndpointStyle = isIpEndpointStyle; + exports2.attachCredential = attachCredential; + exports2.httpAuthorizationToString = httpAuthorizationToString; + exports2.EscapePath = EscapePath; + exports2.assertResponse = assertResponse; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + function escapeURLPath(url) { + const urlParsed = new URL(url); + let path3 = urlParsed.pathname; + path3 = path3 || "/"; + path3 = escape(path3); + urlParsed.pathname = path3; + return urlParsed.toString(); + } + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; + } + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; + } + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = constants_js_1.DevelopmentConnectionString; + } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, accountName, accountKey, proxyUri @@ -34804,15 +35733,15 @@ var require_dist4 = __commonJS({ function escape(text) { return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); } - function appendToURLPath(url2, name) { - const urlParsed = new URL(url2); + function appendToURLPath(url, name) { + const urlParsed = new URL(url); let path3 = urlParsed.pathname; path3 = path3 ? path3.endsWith("/") ? `${path3}${name}` : `${path3}/${name}` : name; urlParsed.pathname = path3; return urlParsed.toString(); } - function setURLParameter(url2, name, value) { - const urlParsed = new URL(url2); + function setURLParameter(url, name, value) { + const urlParsed = new URL(url); const encodedName = encodeURIComponent(name); const encodedValue = value ? encodeURIComponent(value) : void 0; const searchString = urlParsed.search === "" ? "?" : urlParsed.search; @@ -34831,34 +35760,33 @@ var require_dist4 = __commonJS({ urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return urlParsed.toString(); } - function getURLParameter(url2, name) { - var _a; - const urlParsed = new URL(url2); - return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : void 0; + function getURLParameter(url, name) { + const urlParsed = new URL(url); + return urlParsed.searchParams.get(name) ?? void 0; } - function setURLHost(url2, host) { - const urlParsed = new URL(url2); + function setURLHost(url, host) { + const urlParsed = new URL(url); urlParsed.hostname = host; return urlParsed.toString(); } - function getURLPath(url2) { + function getURLPath(url) { try { - const urlParsed = new URL(url2); + const urlParsed = new URL(url); return urlParsed.pathname; } catch (e) { return void 0; } } - function getURLScheme(url2) { + function getURLScheme(url) { try { - const urlParsed = new URL(url2); + const urlParsed = new URL(url); return urlParsed.protocol.endsWith(":") ? 
urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } catch (e) { return void 0; } } - function getURLPathAndQuery(url2) { - const urlParsed = new URL(url2); + function getURLPathAndQuery(url) { + const urlParsed = new URL(url); const pathString = urlParsed.pathname; if (!pathString) { throw new RangeError("Invalid url without valid path."); @@ -34870,8 +35798,8 @@ var require_dist4 = __commonJS({ } return `${pathString}${queryString}`; } - function getURLQueries(url2) { - let queryString = new URL(url2).search; + function getURLQueries(url) { + let queryString = new URL(url).search; if (!queryString) { return {}; } @@ -34892,8 +35820,8 @@ var require_dist4 = __commonJS({ } return queries; } - function appendToURLQuery(url2, queryParts) { - const urlParsed = new URL(url2); + function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); let query = urlParsed.search; if (query) { query += "&" + queryParts; @@ -34908,7 +35836,10 @@ var require_dist4 = __commonJS({ return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; } function base64encode(content) { - return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); + return !core_util_1.isNodeLike ? btoa(content) : Buffer.from(content).toString("base64"); + } + function base64decode(encodedString) { + return !core_util_1.isNodeLike ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); } function generateBlockID(blockIDPrefix, blockIndex) { const maxSourceStringLength = 48; @@ -34956,11 +35887,31 @@ var require_dist4 = __commonJS({ return padString.slice(0, targetLength) + currentString; } } + function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, constants_js_1.URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; + } + function sanitizeHeaders(originalHeader) { + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === constants_js_1.HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); + } else if (name.toLowerCase() === constants_js_1.HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); + } else { + headers.set(name, value); + } + } + return headers; + } function iEqual(str1, str2) { return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } - function getAccountNameFromUrl(url2) { - const parsedUrl = new URL(url2); + function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); let accountName; try { if (parsedUrl.hostname.split(".")[1] === "blob") { @@ -34977,197 +35928,15 @@ var require_dist4 = __commonJS({ } function isIpEndpointStyle(parsedUrl) { const host = parsedUrl.host; - return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port); - } - function toBlobTagsString(tags2) { - if (tags2 === void 0) { - return void 0; - } - const tagPairs = []; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); - } - } - return tagPairs.join("&"); - } - function toBlobTags(tags2) { - if (tags2 === void 0) { - return void 0; - } - 
const res = { - blobTagSet: [] - }; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - res.blobTagSet.push({ - key, - value - }); - } - } - return res; - } - function toTags(tags2) { - if (tags2 === void 0) { - return void 0; - } - const res = {}; - for (const blobTag of tags2.blobTagSet) { - res[blobTag.key] = blobTag.value; - } - return res; - } - function toQuerySerialization(textConfiguration) { - if (textConfiguration === void 0) { - return void 0; - } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema - } - } - }; - case "parquet": - return { - format: { - type: "parquet" - } - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); - } + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && constants_js_1.PathStylePorts.includes(parsedUrl.port); } - function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return void 0; - } - if ("policy-id" in objectReplicationRecord) { - return void 0; - } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); - } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); - } else { - orProperties.push({ - policyId: ids[0], - rules: [rule] - }); - } - } - return orProperties; + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; } function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; } - function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); - } else { - return name.content; - } - } - function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - ++pageRangeIndex; - } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - ++clearRangeIndex; - } - } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - } - } function EscapePath(blobName) { const split = blobName.split("/"); for (let i = 0; i < split.length; i++) { @@ -35181,196 +35950,179 @@ var require_dist4 = __commonJS({ } throw new TypeError(`Unexpected response object ${response}`); } - exports2.StorageRetryPolicyType = void 0; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(exports2.StorageRetryPolicyType || (exports2.StorageRetryPolicyType = {})); - var DEFAULT_RETRY_OPTIONS$1 = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: exports2.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); - var StorageRetryPolicy = class extends BaseRequestPolicy { + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * Creates an instance of RetryPolicy. - * + * Creates an instance of StorageBrowserPolicy. * @param nextPolicy - * @param options - - * @param retryOptions - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + // The base class has a protected constructor. 
Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { super(nextPolicy, options); - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS$1.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS$1.secondaryHost - }; } /** - * Sends request. + * Sends out request. * * @param request - */ async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); - } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. - */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + if (core_util_1.isNodeLike) { + return this._nextPolicy.sendRequest(request); } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); + request.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } + }; + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); + var StorageBrowserPolicyFactory = class { /** - * Decide whether to retry according to last HTTP response and retry counters. + * Creates a StorageBrowserPolicyFactory object. * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - + * @param nextPolicy - + * @param options - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); - return false; - } - const retriableErrors2 = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors2) { - if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } - } - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } - } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? 
void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; - } - return false; + create(nextPolicy, options) { + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } + }; + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js +var require_CredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/CredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CredentialPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy2(); + var CredentialPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * Delay a calculated time between retries. + * Sends out request. * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - + * @param request - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports2.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports2.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } else { - delayTimeInMs = Math.random() * 1e3; - } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } - }; - var StorageRetryPolicyFactory = class { /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; + signRequest(request) { + return request; } + }; + exports2.CredentialPolicy = CredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js +var require_AnonymousCredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/AnonymousCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredentialPolicy = void 0; + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var AnonymousCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { /** - * Creates a StorageRetryPolicy object. - * + * Creates an instance of AnonymousCredentialPolicy. * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } }; - var CredentialPolicy = class extends BaseRequestPolicy { + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js +var require_Credential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/Credential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Credential = void 0; + var Credential = class { /** - * Sends out request. + * Creates a RequestPolicy object. * - * @param request - + * @param _nextPolicy - + * @param _options - */ - sendRequest(request) { - return this._nextPolicy.sendRequest(this.signRequest(request)); + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); } + }; + exports2.Credential = Credential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js +var require_AnonymousCredential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/AnonymousCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AnonymousCredential = void 0; + var AnonymousCredentialPolicy_js_1 = require_AnonymousCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var AnonymousCredential = class extends Credential_js_1.Credential { /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. + * Creates an {@link AnonymousCredentialPolicy} object. 
* - * @param request - + * @param nextPolicy - + * @param options - */ - signRequest(request) { - return request; + create(nextPolicy, options) { + return new AnonymousCredentialPolicy_js_1.AnonymousCredentialPolicy(nextPolicy, options); } }; + exports2.AnonymousCredential = AnonymousCredential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js +var require_SharedKeyComparator2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/utils/SharedKeyComparator.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.compareHeader = compareHeader; var table_lv0 = new Uint32Array([ 0, 0, @@ -35794,7 +36546,24 @@ var require_dist4 = __commonJS({ } return false; } - var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy { + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js +var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredentialPolicy = void 0; + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var CredentialPolicy_js_1 = require_CredentialPolicy2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy_js_1.CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + factory; /** * Creates an instance of StorageSharedKeyCredentialPolicy. 
* @param nextPolicy - @@ -35811,26 +36580,26 @@ var require_dist4 = __commonJS({ * @param request - */ signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); return request; } /** @@ -35845,7 +36614,7 @@ var require_dist4 = __commonJS({ if (!value) { return ""; } - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; @@ -35865,10 +36634,10 @@ var require_dist4 = __commonJS({ */ getCanonicalizedHeadersString(request) { let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + return value.name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE); }); headersArray.sort((a, b) => { - return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + return (0, 
SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { @@ -35889,10 +36658,10 @@ var require_dist4 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path3 = getURLPath(request.url) || "/"; + const path3 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${this.factory.accountName}${path3}`; - const queries = getURLQueries(request.url); + const queries = (0, utils_common_js_1.getURLQueries)(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; @@ -35912,18 +36681,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedResourceString; } }; - var Credential = class { + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js +var require_StorageSharedKeyCredential2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/credentials/StorageSharedKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageSharedKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var StorageSharedKeyCredentialPolicy_js_1 = require_StorageSharedKeyCredentialPolicy2(); + var Credential_js_1 = require_Credential2(); + var StorageSharedKeyCredential = class extends Credential_js_1.Credential { /** - * Creates a RequestPolicy object. - * - * @param _nextPolicy - - * @param _options - + * Azure Storage account name; readonly. */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); - } - }; - var StorageSharedKeyCredential = class extends Credential { + accountName; + /** + * Azure Storage account key; readonly. + */ + accountKey; /** * Creates an instance of StorageSharedKeyCredential. * @param accountName - @@ -35941,7 +36720,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + return new StorageSharedKeyCredentialPolicy_js_1.StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } /** * Generates a hash signature for an HTTP request or for a SAS. 
@@ -35949,67 +36728,631 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto2.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + return (0, node_crypto_1.createHmac)("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; - var AnonymousCredentialPolicy = class extends CredentialPolicy { + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/log.js +var require_log6 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("storage-common"); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js +var require_StorageRetryPolicyType2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyType.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicyType; + (function(StorageRetryPolicyType2) { + StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; + })(StorageRetryPolicyType || (exports2.StorageRetryPolicyType = StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js +var require_StorageRetryPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicy = void 0; + exports2.NewRetryPolicyFactory = NewRetryPolicyFactory; + var abort_controller_1 = require_commonjs3(); + var RequestPolicy_js_1 = require_RequestPolicy2(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + } + }; + } + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** - * Creates an instance of AnonymousCredentialPolicy. + * RetryOptions. + */ + retryOptions; + /** + * Creates an instance of RetryPolicy. + * * @param nextPolicy - * @param options - + * @param retryOptions - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = (0, utils_common_js_1.setURLHost)(newRequest.url, this.retryOptions.secondaryHost); + } + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = (0, utils_common_js_1.setURLParameter)(newRequest.url, constants_js_1.URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); + } + let response; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; + } + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if (err?.code === "PARSE_ERROR" && err?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return (0, utils_common_js_1.delay)(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - var AnonymousCredential = class extends Credential { + exports2.StorageRetryPolicy = StorageRetryPolicy; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js +var require_StorageRetryPolicyFactory2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/StorageRetryPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageRetryPolicyFactory = exports2.StorageRetryPolicy = exports2.StorageRetryPolicyType = void 0; + var StorageRetryPolicy_js_1 = require_StorageRetryPolicy2(); + Object.defineProperty(exports2, "StorageRetryPolicy", { enumerable: true, get: function() { + return StorageRetryPolicy_js_1.StorageRetryPolicy; + } }); + var StorageRetryPolicyType_js_1 = require_StorageRetryPolicyType2(); + Object.defineProperty(exports2, "StorageRetryPolicyType", { enumerable: true, get: function() { + return StorageRetryPolicyType_js_1.StorageRetryPolicyType; + } }); + var StorageRetryPolicyFactory = class { + retryOptions; /** - * Creates an {@link AnonymousCredentialPolicy} object. 
+ * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + return new StorageRetryPolicy_js_1.StorageRetryPolicy(nextPolicy, options, this.retryOptions); } }; - var _defaultHttpClient; - function getCachedDefaultHttpClient() { - if (!_defaultHttpClient) { - _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; + function storageBrowserPolicy() { + return { + name: exports2.storageBrowserPolicyName, + async sendRequest(request, next) { + if (core_util_1.isNodeLike) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); + return next(request); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants10(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + function storageCorrectContentLengthPolicy() { + function correctContentLength(request) { + if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } } - return _defaultHttpClient; + return { + name: exports2.storageCorrectContentLengthPolicyName, + async sendRequest(request, next) { + correctContentLength(request); + return next(request); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var 
abort_controller_1 = require_commonjs3(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory2(); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var log_js_1 = require_log6(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + ]; + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); + function storageRetryPolicy(options = {}) { + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }) { + if (attempt >= maxTries) { + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error2) { + for (const retriableError of retriableErrors) { + if (error2.name.toUpperCase().includes(retriableError) || error2.message.toUpperCase().includes(retriableError) || error2.code && error2.code.toString().toUpperCase() === retriableError) { + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if (error2?.code === "PARSE_ERROR" && error2?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + if (response || error2) { + const statusCode = response?.status ?? error2?.statusCode ?? 
0; + if (!isPrimaryRetry && statusCode === 404) { + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; + } + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: exports2.storageRetryPolicyName, + async sendRequest(request, next) { + if (tryTimeoutInMs) { + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request.url, secondaryHost) : void 0; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error2; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request.method) || attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = void 0; + error2 = void 0; + try { + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (e) { + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error2 = e; + } else { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }); + if (retryAgain) { + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error2 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); + } + }; + } + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants10(); + var utils_common_js_1 = require_utils_common2(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator2(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) + ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); + const signature = (0, node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + } + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + 
canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path3 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path3}`; + const queries = (0, utils_common_js_1.getURLQueries)(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: exports2.storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + } + }; } - var storageBrowserPolicyName = "storageBrowserPolicy"; + } +}); + +// ../../node_modules/@azure/storage-common/dist/commonjs/index.js +var require_commonjs11 = __commonJS({ + "../../node_modules/@azure/storage-common/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BaseRequestPolicy = exports2.getCachedDefaultHttpClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_BufferScheduler(), exports2); + var cache_js_1 = require_cache2(); + Object.defineProperty(exports2, "getCachedDefaultHttpClient", { enumerable: true, get: function() { + return cache_js_1.getCachedDefaultHttpClient; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory(), exports2); + tslib_1.__exportStar(require_AnonymousCredential2(), exports2); + tslib_1.__exportStar(require_Credential2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy2(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_CredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicy(), exports2); + tslib_1.__exportStar(require_StorageBrowserPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageCorrectContentLengthPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyType2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicy2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy2(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicyV2(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory2(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js +var require_StorageBrowserPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageBrowserPolicyName = 
void 0; + exports2.storageBrowserPolicy = storageBrowserPolicy; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + exports2.storageBrowserPolicyName = "storageBrowserPolicy"; function storageBrowserPolicy() { return { - name: storageBrowserPolicyName, + name: exports2.storageBrowserPolicyName, async sendRequest(request, next) { - if (coreUtil.isNode) { + if (core_util_1.isNodeLike) { return next(request); } if (request.method === "GET" || request.method === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - request.headers.delete(HeaderConstants.COOKIE); - request.headers.delete(HeaderConstants.CONTENT_LENGTH); + request.headers.delete(constants_js_1.HeaderConstants.COOKIE); + request.headers.delete(constants_js_1.HeaderConstants.CONTENT_LENGTH); return next(request); } }; } - var storageRetryPolicyName = "storageRetryPolicy"; - var StorageRetryPolicyType; - (function(StorageRetryPolicyType2) { - StorageRetryPolicyType2[StorageRetryPolicyType2["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType2[StorageRetryPolicyType2["FIXED"] = 1] = "FIXED"; - })(StorageRetryPolicyType || (StorageRetryPolicyType = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js +var require_StorageRetryPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageRetryPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageRetryPolicyName = void 0; + exports2.storageRetryPolicy = storageRetryPolicy; + var abort_controller_1 = require_commonjs3(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var log_js_1 = require_log5(); + exports2.storageRetryPolicyName = "storageRetryPolicy"; var DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1e3, maxTries: 4, retryDelayInMs: 4 * 1e3, - retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + retryPolicyType: StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: void 0 // Use server side default timeout strategy @@ -36025,41 +37368,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; "EPIPE", "REQUEST_SEND_ERROR" ]; - var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); + var RETRY_ABORT_ERROR = new abort_controller_1.AbortError("The operation was aborted."); function storageRetryPolicy(options = {}) { - var _a, _b, _c, _d, _e, _f; - const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; - const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; - const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; - const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? 
_d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; - const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; - const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + const retryPolicyType = options.retryPolicyType ?? DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = options.maxTries ?? DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = options.retryDelayInMs ?? DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = options.maxRetryDelayInMs ?? DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = options.secondaryHost ?? DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = options.tryTimeoutInMs ?? DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; function shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }) { - var _a2, _b2; if (attempt >= maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + log_js_1.logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); return false; } if (error2) { for (const retriableError of retriableErrors) { if (error2.name.toUpperCase().includes(retriableError) || error2.message.toUpperCase().includes(retriableError) || error2.code && error2.code.toString().toUpperCase() === retriableError) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + log_js_1.logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } } - if ((error2 === null || error2 === void 0 ? void 0 : error2.code) === "PARSE_ERROR" && (error2 === null || error2 === void 0 ? void 0 : error2.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + if (error2?.code === "PARSE_ERROR" && error2?.message.startsWith(`Error "Error: Unclosed root tag`)) { + log_js_1.logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } } if (response || error2) { - const statusCode = (_b2 = (_a2 = response === null || response === void 0 ? void 0 : response.status) !== null && _a2 !== void 0 ? _a2 : error2 === null || error2 === void 0 ? void 0 : error2.statusCode) !== null && _b2 !== void 0 ? _b2 : 0; + const statusCode = response?.status ?? error2?.statusCode ?? 
0; if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + log_js_1.logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + log_js_1.logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } @@ -36069,27 +37410,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; let delayTimeInMs = 0; if (isPrimaryRetry) { switch (retryPolicyType) { - case StorageRetryPolicyType.EXPONENTIAL: + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); break; - case StorageRetryPolicyType.FIXED: + case StorageRetryPolicyFactory_js_1.StorageRetryPolicyType.FIXED: delayTimeInMs = retryDelayInMs; break; } } else { delayTimeInMs = Math.random() * 1e3; } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + log_js_1.logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delayTimeInMs; } return { - name: storageRetryPolicyName, + name: exports2.storageRetryPolicyName, async sendRequest(request, next) { if (tryTimeoutInMs) { - request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); } const primaryUrl = request.url; - const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : void 0; + const secondaryUrl = secondaryHost ? (0, utils_common_js_1.setURLHost)(request.url, secondaryHost) : void 0; let secondaryHas404 = false; let attempt = 1; let retryAgain = true; @@ -36101,61 +37442,75 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; response = void 0; error2 = void 0; try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + log_js_1.logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); response = await next(request); secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; } catch (e) { - if (coreRestPipeline.isRestError(e)) { - logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + if ((0, core_rest_pipeline_1.isRestError)(e)) { + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); error2 = e; } else { - logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + log_js_1.logger.error(`RetryPolicy: Caught error, message: ${(0, core_util_1.getErrorMessage)(e)}`); throw e; } } retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error: error2 }); if (retryAgain) { - await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + await (0, utils_common_js_1.delay)(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); } attempt++; } if (response) { return response; } - throw error2 !== null && error2 !== void 0 ? error2 : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + throw error2 ?? 
new core_rest_pipeline_1.RestError("RetryPolicy failed without known error."); } }; } - var storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js +var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageSharedKeyCredentialPolicyV2.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageSharedKeyCredentialPolicyName = void 0; + exports2.storageSharedKeyCredentialPolicy = storageSharedKeyCredentialPolicy; + var node_crypto_1 = require("node:crypto"); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var SharedKeyComparator_js_1 = require_SharedKeyComparator(); + exports2.storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; function storageSharedKeyCredentialPolicy(options) { function signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + request.headers.set(constants_js_1.HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), - getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - getHeaderValueToSign(request, HeaderConstants.DATE), - getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - getHeaderValueToSign(request, HeaderConstants.RANGE) + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.DATE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, constants_js_1.HeaderConstants.RANGE) ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); - const signature = crypto2.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + const signature = (0, 
node_crypto_1.createHmac)("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + request.headers.set(constants_js_1.HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } function getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) { return ""; } - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + if (headerName === constants_js_1.HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; @@ -36163,12 +37518,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; function getCanonicalizedHeadersString(request) { let headersArray = []; for (const [name, value] of request.headers) { - if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + if (name.toLowerCase().startsWith(constants_js_1.HeaderConstants.PREFIX_FOR_STORAGE)) { headersArray.push({ name, value }); } } headersArray.sort((a, b) => { - return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); + return (0, SharedKeyComparator_js_1.compareHeader)(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { @@ -36184,10 +37539,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path3 = getURLPath(request.url) || "/"; + const path3 = (0, utils_common_js_1.getURLPath)(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${options.accountName}${path3}`; - const queries = getURLQueries(request.url); + const queries = (0, utils_common_js_1.getURLQueries)(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; @@ -36207,14 +37562,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedResourceString; } return { - name: storageSharedKeyCredentialPolicyName, + name: exports2.storageSharedKeyCredentialPolicyName, async sendRequest(request, next) { signRequest(request); return next(request); } }; } - var StorageBrowserPolicy = class extends BaseRequestPolicy { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js +var require_StorageBrowserPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageBrowserPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicy = void 0; + var RequestPolicy_js_1 = require_RequestPolicy(); + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + var StorageBrowserPolicy = class extends RequestPolicy_js_1.BaseRequestPolicy { /** * Creates an instance of StorageBrowserPolicy. 
* @param nextPolicy - @@ -36231,17 +37599,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param request - */ async sendRequest(request) { - if (coreUtil.isNode) { + if (core_util_1.isNodeLike) { return this._nextPolicy.sendRequest(request); } if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + request.url = (0, utils_common_js_1.setURLParameter)(request.url, constants_js_1.URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); } - request.headers.remove(HeaderConstants.COOKIE); - request.headers.remove(HeaderConstants.CONTENT_LENGTH); + request.headers.remove(constants_js_1.HeaderConstants.COOKIE); + request.headers.remove(constants_js_1.HeaderConstants.CONTENT_LENGTH); return this._nextPolicy.sendRequest(request); } }; + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js +var require_StorageBrowserPolicyFactory2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageBrowserPolicyFactory.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBrowserPolicyFactory = exports2.StorageBrowserPolicy = void 0; + var StorageBrowserPolicy_js_1 = require_StorageBrowserPolicy2(); + Object.defineProperty(exports2, "StorageBrowserPolicy", { enumerable: true, get: function() { + return StorageBrowserPolicy_js_1.StorageBrowserPolicy; + } }); var StorageBrowserPolicyFactory = class { /** * Creates a StorageBrowserPolicyFactory object. @@ -36250,24 +37632,68 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); + return new StorageBrowserPolicy_js_1.StorageBrowserPolicy(nextPolicy, options); } }; - var storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js +var require_StorageCorrectContentLengthPolicy2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/policies/StorageCorrectContentLengthPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.storageCorrectContentLengthPolicyName = void 0; + exports2.storageCorrectContentLengthPolicy = storageCorrectContentLengthPolicy; + var constants_js_1 = require_constants9(); + exports2.storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; function storageCorrectContentLengthPolicy() { function correctContentLength(request) { if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + request.headers.set(constants_js_1.HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } } return { - name: storageCorrectContentLengthPolicyName, + name: exports2.storageCorrectContentLengthPolicyName, async sendRequest(request, next) { correctContentLength(request); return next(request); } }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js +var 
require_Pipeline = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Pipeline.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Pipeline = exports2.StorageOAuthScopes = void 0; + exports2.isPipelineLike = isPipelineLike; + exports2.newPipeline = newPipeline; + exports2.getCoreClientOptions = getCoreClientOptions; + exports2.getCredentialFromPipeline = getCredentialFromPipeline; + var core_http_compat_1 = require_commonjs9(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_client_1 = require_commonjs8(); + var core_xml_1 = require_commonjs10(); + var core_auth_1 = require_commonjs7(); + var log_js_1 = require_log5(); + var StorageRetryPolicyFactory_js_1 = require_StorageRetryPolicyFactory(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var constants_js_1 = require_constants9(); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return constants_js_1.StorageOAuthScopes; + } }); + var storage_common_1 = require_commonjs11(); + var StorageBrowserPolicyV2_js_1 = require_StorageBrowserPolicyV22(); + var StorageRetryPolicyV2_js_1 = require_StorageRetryPolicyV22(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); + var StorageBrowserPolicyFactory_js_1 = require_StorageBrowserPolicyFactory2(); + var StorageCorrectContentLengthPolicy_js_1 = require_StorageCorrectContentLengthPolicy2(); function isPipelineLike(pipeline) { if (!pipeline || typeof pipeline !== "object") { return false; @@ -36276,6 +37702,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; } var Pipeline = class { + /** + * A list of chained request policy factories. + */ + factories; + /** + * Configures pipeline logger and HTTP client. + */ + options; /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * @@ -36299,9 +37733,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; } }; + exports2.Pipeline = Pipeline; function newPipeline(credential, pipelineOptions = {}) { if (!credential) { - credential = new AnonymousCredential(); + credential = new AnonymousCredential_js_1.AnonymousCredential(); } const pipeline = new Pipeline([], pipelineOptions); pipeline._credential = credential; @@ -36324,7 +37759,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (novelFactories.length) { const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); return { - wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + wrappedPolicies: (0, core_http_compat_1.createRequestPolicyFactoryPolicy)(novelFactories), afterRetry: hasInjector }; } @@ -36332,75 +37767,85 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return void 0; } function getCoreClientOptions(pipeline) { - var _a; - const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + const { httpClient: v1Client, ...restOptions } = pipeline.options; let httpClient = pipeline._coreHttpClient; if (!httpClient) { - httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + httpClient = v1Client ? 
(0, core_http_compat_1.convertHttpClient)(v1Client) : (0, storage_common_1.getCachedDefaultHttpClient)(); pipeline._coreHttpClient = httpClient; } let corePipeline = pipeline._corePipeline; if (!corePipeline) { - const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const packageDetails = `azsdk-js-azure-storage-blob/${constants_js_1.SDK_VERSION}`; const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { - additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - logger: logger.info - }, userAgentOptions: { - userAgentPrefix - }, serializationOptions: { - stringifyXML: coreXml.stringifyXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" + corePipeline = (0, core_client_1.createClientPipeline)({ + ...restOptions, + loggingOptions: { + additionalAllowedHeaderNames: constants_js_1.StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: constants_js_1.StorageBlobLoggingAllowedQueryParameters, + logger: log_js_1.logger.info + }, + userAgentOptions: { + userAgentPrefix + }, + serializationOptions: { + stringifyXML: core_xml_1.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } - } - }, deserializationOptions: { - parseXML: coreXml.parseXML, - serializerOptions: { - xml: { - // Use customized XML char key of "#" so we can deserialize metadata - // with "_" key - xmlCharKey: "#" + }, + deserializationOptions: { + parseXML: core_xml_1.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#" + } } } - } })); + }); corePipeline.removePolicy({ phase: "Retry" }); - corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); - corePipeline.addPolicy(storageCorrectContentLengthPolicy()); - corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); - corePipeline.addPolicy(storageBrowserPolicy()); + corePipeline.removePolicy({ name: core_rest_pipeline_1.decompressResponsePolicyName }); + corePipeline.addPolicy((0, StorageCorrectContentLengthPolicy_js_1.storageCorrectContentLengthPolicy)()); + corePipeline.addPolicy((0, StorageRetryPolicyV2_js_1.storageRetryPolicy)(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy((0, StorageBrowserPolicyV2_js_1.storageBrowserPolicy)()); const downlevelResults = processDownlevelPipeline(pipeline); if (downlevelResults) { corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : void 0); } const credential = getCredentialFromPipeline(pipeline); - if (coreAuth.isTokenCredential(credential)) { - corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ credential, - scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } + scopes: restOptions.audience ?? constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential) { - corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); } pipeline._corePipeline = corePipeline; } - return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); + return { + ...restOptions, + allowInsecureConnection: true, + httpClient, + pipeline: corePipeline + }; } function getCredentialFromPipeline(pipeline) { if (pipeline._credential) { return pipeline._credential; } - let credential = new AnonymousCredential(); + let credential = new AnonymousCredential_js_1.AnonymousCredential(); for (const factory of pipeline.factories) { - if (coreAuth.isTokenCredential(factory.credential)) { + if ((0, core_auth_1.isTokenCredential)(factory.credential)) { credential = factory.credential; } else if (isStorageSharedKeyCredential(factory)) { return factory; @@ -36409,28 +37854,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return credential; } function isStorageSharedKeyCredential(factory) { - if (factory instanceof StorageSharedKeyCredential) { + if (factory instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { return true; } return factory.constructor.name === "StorageSharedKeyCredential"; } function isAnonymousCredential(factory) { - if (factory instanceof AnonymousCredential) { + if (factory instanceof AnonymousCredential_js_1.AnonymousCredential) { return true; } return factory.constructor.name === "AnonymousCredential"; } function isCoreHttpBearerTokenFactory(factory) { - return coreAuth.isTokenCredential(factory.credential); + return (0, core_auth_1.isTokenCredential)(factory.credential); } function isStorageBrowserPolicyFactory(factory) { - if (factory instanceof StorageBrowserPolicyFactory) { + if (factory instanceof StorageBrowserPolicyFactory_js_1.StorageBrowserPolicyFactory) { return true; } return factory.constructor.name === "StorageBrowserPolicyFactory"; } function isStorageRetryPolicyFactory(factory) { - if (factory instanceof StorageRetryPolicyFactory) { + if (factory instanceof StorageRetryPolicyFactory_js_1.StorageRetryPolicyFactory) { return true; } return factory.constructor.name === "StorageRetryPolicyFactory"; @@ -36473,7 +37918,160 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return policyName.startsWith(knownPolicyName); }); } - var BlobServiceProperties = { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js +var require_models = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownStorageErrorCode = exports2.KnownBlobExpiryOptions = exports2.KnownFileShareTokenIntent = exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) 
{ + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + var KnownFileShareTokenIntent; + (function(KnownFileShareTokenIntent2) { + KnownFileShareTokenIntent2["Backup"] = "backup"; + })(KnownFileShareTokenIntent || (exports2.KnownFileShareTokenIntent = KnownFileShareTokenIntent = {})); + var KnownBlobExpiryOptions; + (function(KnownBlobExpiryOptions2) { + KnownBlobExpiryOptions2["NeverExpire"] = "NeverExpire"; + KnownBlobExpiryOptions2["RelativeToCreation"] = "RelativeToCreation"; + KnownBlobExpiryOptions2["RelativeToNow"] = "RelativeToNow"; + KnownBlobExpiryOptions2["Absolute"] = "Absolute"; + })(KnownBlobExpiryOptions || (exports2.KnownBlobExpiryOptions = KnownBlobExpiryOptions = {})); + var KnownStorageErrorCode; + (function(KnownStorageErrorCode2) { + KnownStorageErrorCode2["AccountAlreadyExists"] = "AccountAlreadyExists"; + KnownStorageErrorCode2["AccountBeingCreated"] = "AccountBeingCreated"; + KnownStorageErrorCode2["AccountIsDisabled"] = "AccountIsDisabled"; + KnownStorageErrorCode2["AuthenticationFailed"] = "AuthenticationFailed"; + KnownStorageErrorCode2["AuthorizationFailure"] = "AuthorizationFailure"; + KnownStorageErrorCode2["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported"; + KnownStorageErrorCode2["ConditionNotMet"] = "ConditionNotMet"; + KnownStorageErrorCode2["EmptyMetadataKey"] = "EmptyMetadataKey"; + KnownStorageErrorCode2["InsufficientAccountPermissions"] = "InsufficientAccountPermissions"; + KnownStorageErrorCode2["InternalError"] = "InternalError"; + KnownStorageErrorCode2["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo"; + KnownStorageErrorCode2["InvalidHeaderValue"] = "InvalidHeaderValue"; + KnownStorageErrorCode2["InvalidHttpVerb"] = "InvalidHttpVerb"; + KnownStorageErrorCode2["InvalidInput"] = "InvalidInput"; + KnownStorageErrorCode2["InvalidMd5"] = "InvalidMd5"; + KnownStorageErrorCode2["InvalidMetadata"] = "InvalidMetadata"; + KnownStorageErrorCode2["InvalidQueryParameterValue"] = "InvalidQueryParameterValue"; + KnownStorageErrorCode2["InvalidRange"] = "InvalidRange"; + KnownStorageErrorCode2["InvalidResourceName"] = "InvalidResourceName"; + KnownStorageErrorCode2["InvalidUri"] = "InvalidUri"; + KnownStorageErrorCode2["InvalidXmlDocument"] = "InvalidXmlDocument"; + KnownStorageErrorCode2["InvalidXmlNodeValue"] = "InvalidXmlNodeValue"; + KnownStorageErrorCode2["Md5Mismatch"] = "Md5Mismatch"; + KnownStorageErrorCode2["MetadataTooLarge"] = "MetadataTooLarge"; + KnownStorageErrorCode2["MissingContentLengthHeader"] = "MissingContentLengthHeader"; + KnownStorageErrorCode2["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter"; + KnownStorageErrorCode2["MissingRequiredHeader"] = "MissingRequiredHeader"; + KnownStorageErrorCode2["MissingRequiredXmlNode"] = "MissingRequiredXmlNode"; + KnownStorageErrorCode2["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported"; + KnownStorageErrorCode2["OperationTimedOut"] = "OperationTimedOut"; + KnownStorageErrorCode2["OutOfRangeInput"] = "OutOfRangeInput"; + KnownStorageErrorCode2["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue"; + KnownStorageErrorCode2["RequestBodyTooLarge"] = "RequestBodyTooLarge"; + KnownStorageErrorCode2["ResourceTypeMismatch"] = "ResourceTypeMismatch"; + KnownStorageErrorCode2["RequestUrlFailedToParse"] = "RequestUrlFailedToParse"; + KnownStorageErrorCode2["ResourceAlreadyExists"] = "ResourceAlreadyExists"; + 
KnownStorageErrorCode2["ResourceNotFound"] = "ResourceNotFound"; + KnownStorageErrorCode2["ServerBusy"] = "ServerBusy"; + KnownStorageErrorCode2["UnsupportedHeader"] = "UnsupportedHeader"; + KnownStorageErrorCode2["UnsupportedXmlNode"] = "UnsupportedXmlNode"; + KnownStorageErrorCode2["UnsupportedQueryParameter"] = "UnsupportedQueryParameter"; + KnownStorageErrorCode2["UnsupportedHttpVerb"] = "UnsupportedHttpVerb"; + KnownStorageErrorCode2["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet"; + KnownStorageErrorCode2["BlobAlreadyExists"] = "BlobAlreadyExists"; + KnownStorageErrorCode2["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy"; + KnownStorageErrorCode2["BlobNotFound"] = "BlobNotFound"; + KnownStorageErrorCode2["BlobOverwritten"] = "BlobOverwritten"; + KnownStorageErrorCode2["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength"; + KnownStorageErrorCode2["BlobUsesCustomerSpecifiedEncryption"] = "BlobUsesCustomerSpecifiedEncryption"; + KnownStorageErrorCode2["BlockCountExceedsLimit"] = "BlockCountExceedsLimit"; + KnownStorageErrorCode2["BlockListTooLong"] = "BlockListTooLong"; + KnownStorageErrorCode2["CannotChangeToLowerTier"] = "CannotChangeToLowerTier"; + KnownStorageErrorCode2["CannotVerifyCopySource"] = "CannotVerifyCopySource"; + KnownStorageErrorCode2["ContainerAlreadyExists"] = "ContainerAlreadyExists"; + KnownStorageErrorCode2["ContainerBeingDeleted"] = "ContainerBeingDeleted"; + KnownStorageErrorCode2["ContainerDisabled"] = "ContainerDisabled"; + KnownStorageErrorCode2["ContainerNotFound"] = "ContainerNotFound"; + KnownStorageErrorCode2["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit"; + KnownStorageErrorCode2["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported"; + KnownStorageErrorCode2["CopyIdMismatch"] = "CopyIdMismatch"; + KnownStorageErrorCode2["FeatureVersionMismatch"] = "FeatureVersionMismatch"; + KnownStorageErrorCode2["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch"; + KnownStorageErrorCode2["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed"; + KnownStorageErrorCode2["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot"; + KnownStorageErrorCode2["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired"; + KnownStorageErrorCode2["InvalidBlobOrBlock"] = "InvalidBlobOrBlock"; + KnownStorageErrorCode2["InvalidBlobTier"] = "InvalidBlobTier"; + KnownStorageErrorCode2["InvalidBlobType"] = "InvalidBlobType"; + KnownStorageErrorCode2["InvalidBlockId"] = "InvalidBlockId"; + KnownStorageErrorCode2["InvalidBlockList"] = "InvalidBlockList"; + KnownStorageErrorCode2["InvalidOperation"] = "InvalidOperation"; + KnownStorageErrorCode2["InvalidPageRange"] = "InvalidPageRange"; + KnownStorageErrorCode2["InvalidSourceBlobType"] = "InvalidSourceBlobType"; + KnownStorageErrorCode2["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl"; + KnownStorageErrorCode2["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation"; + KnownStorageErrorCode2["LeaseAlreadyPresent"] = "LeaseAlreadyPresent"; + KnownStorageErrorCode2["LeaseAlreadyBroken"] = "LeaseAlreadyBroken"; + KnownStorageErrorCode2["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation"; + KnownStorageErrorCode2["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation"; + 
KnownStorageErrorCode2["LeaseIdMissing"] = "LeaseIdMissing"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired"; + KnownStorageErrorCode2["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged"; + KnownStorageErrorCode2["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed"; + KnownStorageErrorCode2["LeaseLost"] = "LeaseLost"; + KnownStorageErrorCode2["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation"; + KnownStorageErrorCode2["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation"; + KnownStorageErrorCode2["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet"; + KnownStorageErrorCode2["NoAuthenticationInformation"] = "NoAuthenticationInformation"; + KnownStorageErrorCode2["NoPendingCopyOperation"] = "NoPendingCopyOperation"; + KnownStorageErrorCode2["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob"; + KnownStorageErrorCode2["PendingCopyOperation"] = "PendingCopyOperation"; + KnownStorageErrorCode2["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer"; + KnownStorageErrorCode2["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound"; + KnownStorageErrorCode2["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported"; + KnownStorageErrorCode2["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet"; + KnownStorageErrorCode2["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge"; + KnownStorageErrorCode2["SnapshotCountExceeded"] = "SnapshotCountExceeded"; + KnownStorageErrorCode2["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded"; + KnownStorageErrorCode2["SnapshotsPresent"] = "SnapshotsPresent"; + KnownStorageErrorCode2["SourceConditionNotMet"] = "SourceConditionNotMet"; + KnownStorageErrorCode2["SystemInUse"] = "SystemInUse"; + KnownStorageErrorCode2["TargetConditionNotMet"] = "TargetConditionNotMet"; + KnownStorageErrorCode2["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite"; + KnownStorageErrorCode2["BlobBeingRehydrated"] = "BlobBeingRehydrated"; + KnownStorageErrorCode2["BlobArchived"] = "BlobArchived"; + KnownStorageErrorCode2["BlobNotArchived"] = "BlobNotArchived"; + KnownStorageErrorCode2["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch"; + KnownStorageErrorCode2["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch"; + KnownStorageErrorCode2["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch"; + KnownStorageErrorCode2["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch"; + KnownStorageErrorCode2["AuthorizationResourceTypeMismatch"] = "AuthorizationResourceTypeMismatch"; + KnownStorageErrorCode2["BlobAccessTierNotSupportedForAccountType"] = "BlobAccessTierNotSupportedForAccountType"; + })(KnownStorageErrorCode || (exports2.KnownStorageErrorCode = KnownStorageErrorCode = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js +var require_mappers = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/mappers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceGetUserDelegationKeyHeaders = exports2.ServiceListContainersSegmentExceptionHeaders = exports2.ServiceListContainersSegmentHeaders = 
exports2.ServiceGetStatisticsExceptionHeaders = exports2.ServiceGetStatisticsHeaders = exports2.ServiceGetPropertiesExceptionHeaders = exports2.ServiceGetPropertiesHeaders = exports2.ServiceSetPropertiesExceptionHeaders = exports2.ServiceSetPropertiesHeaders = exports2.ArrowField = exports2.ArrowConfiguration = exports2.JsonTextConfiguration = exports2.DelimitedTextConfiguration = exports2.QueryFormat = exports2.QuerySerialization = exports2.QueryRequest = exports2.ClearRange = exports2.PageRange = exports2.PageList = exports2.Block = exports2.BlockList = exports2.BlockLookupList = exports2.BlobPrefix = exports2.BlobHierarchyListSegment = exports2.ListBlobsHierarchySegmentResponse = exports2.BlobPropertiesInternal = exports2.BlobName = exports2.BlobItemInternal = exports2.BlobFlatListSegment = exports2.ListBlobsFlatSegmentResponse = exports2.AccessPolicy = exports2.SignedIdentifier = exports2.BlobTag = exports2.BlobTags = exports2.FilterBlobItem = exports2.FilterBlobSegment = exports2.UserDelegationKey = exports2.KeyInfo = exports2.ContainerProperties = exports2.ContainerItem = exports2.ListContainersSegmentResponse = exports2.GeoReplication = exports2.BlobServiceStatistics = exports2.StorageError = exports2.StaticWebsite = exports2.CorsRule = exports2.Metrics = exports2.RetentionPolicy = exports2.Logging = exports2.BlobServiceProperties = void 0; + exports2.BlobUndeleteHeaders = exports2.BlobDeleteExceptionHeaders = exports2.BlobDeleteHeaders = exports2.BlobGetPropertiesExceptionHeaders = exports2.BlobGetPropertiesHeaders = exports2.BlobDownloadExceptionHeaders = exports2.BlobDownloadHeaders = exports2.ContainerGetAccountInfoExceptionHeaders = exports2.ContainerGetAccountInfoHeaders = exports2.ContainerListBlobHierarchySegmentExceptionHeaders = exports2.ContainerListBlobHierarchySegmentHeaders = exports2.ContainerListBlobFlatSegmentExceptionHeaders = exports2.ContainerListBlobFlatSegmentHeaders = exports2.ContainerChangeLeaseExceptionHeaders = exports2.ContainerChangeLeaseHeaders = exports2.ContainerBreakLeaseExceptionHeaders = exports2.ContainerBreakLeaseHeaders = exports2.ContainerRenewLeaseExceptionHeaders = exports2.ContainerRenewLeaseHeaders = exports2.ContainerReleaseLeaseExceptionHeaders = exports2.ContainerReleaseLeaseHeaders = exports2.ContainerAcquireLeaseExceptionHeaders = exports2.ContainerAcquireLeaseHeaders = exports2.ContainerFilterBlobsExceptionHeaders = exports2.ContainerFilterBlobsHeaders = exports2.ContainerSubmitBatchExceptionHeaders = exports2.ContainerSubmitBatchHeaders = exports2.ContainerRenameExceptionHeaders = exports2.ContainerRenameHeaders = exports2.ContainerRestoreExceptionHeaders = exports2.ContainerRestoreHeaders = exports2.ContainerSetAccessPolicyExceptionHeaders = exports2.ContainerSetAccessPolicyHeaders = exports2.ContainerGetAccessPolicyExceptionHeaders = exports2.ContainerGetAccessPolicyHeaders = exports2.ContainerSetMetadataExceptionHeaders = exports2.ContainerSetMetadataHeaders = exports2.ContainerDeleteExceptionHeaders = exports2.ContainerDeleteHeaders = exports2.ContainerGetPropertiesExceptionHeaders = exports2.ContainerGetPropertiesHeaders = exports2.ContainerCreateExceptionHeaders = exports2.ContainerCreateHeaders = exports2.ServiceFilterBlobsExceptionHeaders = exports2.ServiceFilterBlobsHeaders = exports2.ServiceSubmitBatchExceptionHeaders = exports2.ServiceSubmitBatchHeaders = exports2.ServiceGetAccountInfoExceptionHeaders = exports2.ServiceGetAccountInfoHeaders = exports2.ServiceGetUserDelegationKeyExceptionHeaders = void 0; + 
exports2.PageBlobGetPageRangesHeaders = exports2.PageBlobUploadPagesFromURLExceptionHeaders = exports2.PageBlobUploadPagesFromURLHeaders = exports2.PageBlobClearPagesExceptionHeaders = exports2.PageBlobClearPagesHeaders = exports2.PageBlobUploadPagesExceptionHeaders = exports2.PageBlobUploadPagesHeaders = exports2.PageBlobCreateExceptionHeaders = exports2.PageBlobCreateHeaders = exports2.BlobSetTagsExceptionHeaders = exports2.BlobSetTagsHeaders = exports2.BlobGetTagsExceptionHeaders = exports2.BlobGetTagsHeaders = exports2.BlobQueryExceptionHeaders = exports2.BlobQueryHeaders = exports2.BlobGetAccountInfoExceptionHeaders = exports2.BlobGetAccountInfoHeaders = exports2.BlobSetTierExceptionHeaders = exports2.BlobSetTierHeaders = exports2.BlobAbortCopyFromURLExceptionHeaders = exports2.BlobAbortCopyFromURLHeaders = exports2.BlobCopyFromURLExceptionHeaders = exports2.BlobCopyFromURLHeaders = exports2.BlobStartCopyFromURLExceptionHeaders = exports2.BlobStartCopyFromURLHeaders = exports2.BlobCreateSnapshotExceptionHeaders = exports2.BlobCreateSnapshotHeaders = exports2.BlobBreakLeaseExceptionHeaders = exports2.BlobBreakLeaseHeaders = exports2.BlobChangeLeaseExceptionHeaders = exports2.BlobChangeLeaseHeaders = exports2.BlobRenewLeaseExceptionHeaders = exports2.BlobRenewLeaseHeaders = exports2.BlobReleaseLeaseExceptionHeaders = exports2.BlobReleaseLeaseHeaders = exports2.BlobAcquireLeaseExceptionHeaders = exports2.BlobAcquireLeaseHeaders = exports2.BlobSetMetadataExceptionHeaders = exports2.BlobSetMetadataHeaders = exports2.BlobSetLegalHoldExceptionHeaders = exports2.BlobSetLegalHoldHeaders = exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = exports2.BlobDeleteImmutabilityPolicyHeaders = exports2.BlobSetImmutabilityPolicyExceptionHeaders = exports2.BlobSetImmutabilityPolicyHeaders = exports2.BlobSetHttpHeadersExceptionHeaders = exports2.BlobSetHttpHeadersHeaders = exports2.BlobSetExpiryExceptionHeaders = exports2.BlobSetExpiryHeaders = exports2.BlobUndeleteExceptionHeaders = void 0; + exports2.BlockBlobGetBlockListExceptionHeaders = exports2.BlockBlobGetBlockListHeaders = exports2.BlockBlobCommitBlockListExceptionHeaders = exports2.BlockBlobCommitBlockListHeaders = exports2.BlockBlobStageBlockFromURLExceptionHeaders = exports2.BlockBlobStageBlockFromURLHeaders = exports2.BlockBlobStageBlockExceptionHeaders = exports2.BlockBlobStageBlockHeaders = exports2.BlockBlobPutBlobFromUrlExceptionHeaders = exports2.BlockBlobPutBlobFromUrlHeaders = exports2.BlockBlobUploadExceptionHeaders = exports2.BlockBlobUploadHeaders = exports2.AppendBlobSealExceptionHeaders = exports2.AppendBlobSealHeaders = exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = exports2.AppendBlobAppendBlockFromUrlHeaders = exports2.AppendBlobAppendBlockExceptionHeaders = exports2.AppendBlobAppendBlockHeaders = exports2.AppendBlobCreateExceptionHeaders = exports2.AppendBlobCreateHeaders = exports2.PageBlobCopyIncrementalExceptionHeaders = exports2.PageBlobCopyIncrementalHeaders = exports2.PageBlobUpdateSequenceNumberExceptionHeaders = exports2.PageBlobUpdateSequenceNumberHeaders = exports2.PageBlobResizeExceptionHeaders = exports2.PageBlobResizeHeaders = exports2.PageBlobGetPageRangesDiffExceptionHeaders = exports2.PageBlobGetPageRangesDiffHeaders = exports2.PageBlobGetPageRangesExceptionHeaders = void 0; + exports2.BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { @@ -36545,7 +38143,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Logging = { + 
exports2.Logging = { serializedName: "Logging", type: { name: "Composite", @@ -36594,7 +38192,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var RetentionPolicy = { + exports2.RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", @@ -36621,7 +38219,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Metrics = { + exports2.Metrics = { serializedName: "Metrics", type: { name: "Composite", @@ -36660,7 +38258,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var CorsRule = { + exports2.CorsRule = { serializedName: "CorsRule", type: { name: "Composite", @@ -36712,7 +38310,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var StaticWebsite = { + exports2.StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", @@ -36750,7 +38348,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var StorageError = { + exports2.StorageError = { serializedName: "StorageError", type: { name: "Composite", @@ -36780,7 +38378,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobServiceStatistics = { + exports2.BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { @@ -36798,7 +38396,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var GeoReplication = { + exports2.GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", @@ -36824,7 +38422,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListContainersSegmentResponse = { + exports2.ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { @@ -36887,7 +38485,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerItem = { + exports2.ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { @@ -36935,7 +38533,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerProperties = { + exports2.ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", @@ -37047,7 +38645,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var KeyInfo = { + exports2.KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", @@ -37072,7 +38670,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var UserDelegationKey = { + exports2.UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", @@ -37137,7 +38735,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var FilterBlobSegment = { + exports2.FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { @@ -37187,7 +38785,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var FilterBlobItem = { + exports2.FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { @@ -37221,7 +38819,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobTags = { + exports2.BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { @@ -37247,7 +38845,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobTag = { + exports2.BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { @@ -37273,7 +38871,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var SignedIdentifier = { + exports2.SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { @@ -37299,7 +38897,7 
@@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AccessPolicy = { + exports2.AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", @@ -37329,7 +38927,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListBlobsFlatSegmentResponse = { + exports2.ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: { @@ -37393,7 +38991,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobFlatListSegment = { + exports2.BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { @@ -37418,7 +39016,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobItemInternal = { + exports2.BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { @@ -37505,7 +39103,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobName = { + exports2.BlobName = { serializedName: "BlobName", type: { name: "Composite", @@ -37530,7 +39128,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobPropertiesInternal = { + exports2.BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { @@ -37857,7 +39455,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ListBlobsHierarchySegmentResponse = { + exports2.ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { @@ -37928,7 +39526,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobHierarchyListSegment = { + exports2.BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { @@ -37967,7 +39565,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobPrefix = { + exports2.BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", @@ -37984,7 +39582,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockLookupList = { + exports2.BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { @@ -38033,7 +39631,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockList = { + exports2.BlockList = { serializedName: "BlockList", type: { name: "Composite", @@ -38072,7 +39670,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var Block = { + exports2.Block = { serializedName: "Block", type: { name: "Composite", @@ -38097,7 +39695,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageList = { + exports2.PageList = { serializedName: "PageList", type: { name: "Composite", @@ -38141,7 +39739,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageRange = { + exports2.PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { @@ -38167,7 +39765,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ClearRange = { + exports2.ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { @@ -38193,7 +39791,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QueryRequest = { + exports2.QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { @@ -38235,7 +39833,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QuerySerialization = { + exports2.QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", @@ -38252,7 +39850,7 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var QueryFormat = { + exports2.QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", @@ -38302,7 +39900,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var DelimitedTextConfiguration = { + exports2.DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { @@ -38347,7 +39945,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var JsonTextConfiguration = { + exports2.JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { @@ -38364,7 +39962,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ArrowConfiguration = { + exports2.ArrowConfiguration = { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { @@ -38390,7 +39988,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ArrowField = { + exports2.ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { @@ -38429,7 +40027,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSetPropertiesHeaders = { + exports2.ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", @@ -38466,7 +40064,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSetPropertiesExceptionHeaders = { + exports2.ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", @@ -38482,7 +40080,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetPropertiesHeaders = { + exports2.ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", @@ -38519,7 +40117,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetPropertiesExceptionHeaders = { + exports2.ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -38535,7 +40133,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetStatisticsHeaders = { + exports2.ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", @@ -38579,7 +40177,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetStatisticsExceptionHeaders = { + exports2.ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", @@ -38595,7 +40193,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceListContainersSegmentHeaders = { + exports2.ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", @@ -38632,7 +40230,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceListContainersSegmentExceptionHeaders = { + exports2.ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", @@ -38648,7 +40246,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetUserDelegationKeyHeaders = { + exports2.ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", @@ -38692,7 +40290,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var 
ServiceGetUserDelegationKeyExceptionHeaders = { + exports2.ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", @@ -38708,7 +40306,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetAccountInfoHeaders = { + exports2.ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", @@ -38787,7 +40385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceGetAccountInfoExceptionHeaders = { + exports2.ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -38803,7 +40401,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSubmitBatchHeaders = { + exports2.ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", @@ -38847,7 +40445,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceSubmitBatchExceptionHeaders = { + exports2.ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", @@ -38863,7 +40461,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceFilterBlobsHeaders = { + exports2.ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", @@ -38907,7 +40505,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ServiceFilterBlobsExceptionHeaders = { + exports2.ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -38923,7 +40521,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerCreateHeaders = { + exports2.ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", @@ -38981,7 +40579,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerCreateExceptionHeaders = { + exports2.ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", @@ -38997,7 +40595,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetPropertiesHeaders = { + exports2.ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", @@ -39137,7 +40735,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetPropertiesExceptionHeaders = { + exports2.ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -39153,7 +40751,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerDeleteHeaders = { + exports2.ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", @@ -39197,7 +40795,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerDeleteExceptionHeaders = { + exports2.ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", @@ -39213,7 +40811,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetMetadataHeaders = { + exports2.ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", @@ -39271,7 +40869,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetMetadataExceptionHeaders 
= { + exports2.ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", @@ -39287,7 +40885,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccessPolicyHeaders = { + exports2.ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", @@ -39353,7 +40951,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccessPolicyExceptionHeaders = { + exports2.ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -39369,7 +40967,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetAccessPolicyHeaders = { + exports2.ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", @@ -39427,7 +41025,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSetAccessPolicyExceptionHeaders = { + exports2.ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -39443,7 +41041,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRestoreHeaders = { + exports2.ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", @@ -39487,7 +41085,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRestoreExceptionHeaders = { + exports2.ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", @@ -39503,7 +41101,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenameHeaders = { + exports2.ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", @@ -39547,7 +41145,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenameExceptionHeaders = { + exports2.ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", @@ -39563,7 +41161,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSubmitBatchHeaders = { + exports2.ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", @@ -39593,7 +41191,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerSubmitBatchExceptionHeaders = { + exports2.ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", @@ -39609,7 +41207,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerFilterBlobsHeaders = { + exports2.ContainerFilterBlobsHeaders = { serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", @@ -39646,7 +41244,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerFilterBlobsExceptionHeaders = { + exports2.ContainerFilterBlobsExceptionHeaders = { serializedName: "Container_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -39662,7 +41260,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerAcquireLeaseHeaders = { + exports2.ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", @@ -39720,7 +41318,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var 
ContainerAcquireLeaseExceptionHeaders = { + exports2.ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -39736,7 +41334,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerReleaseLeaseHeaders = { + exports2.ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", @@ -39787,7 +41385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerReleaseLeaseExceptionHeaders = { + exports2.ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -39803,7 +41401,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenewLeaseHeaders = { + exports2.ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", @@ -39861,7 +41459,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerRenewLeaseExceptionHeaders = { + exports2.ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -39877,7 +41475,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerBreakLeaseHeaders = { + exports2.ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", @@ -39935,7 +41533,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerBreakLeaseExceptionHeaders = { + exports2.ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -39951,7 +41549,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerChangeLeaseHeaders = { + exports2.ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", @@ -40009,7 +41607,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerChangeLeaseExceptionHeaders = { + exports2.ContainerChangeLeaseExceptionHeaders = { serializedName: "Container_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -40025,7 +41623,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobFlatSegmentHeaders = { + exports2.ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", @@ -40076,7 +41674,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobFlatSegmentExceptionHeaders = { + exports2.ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", @@ -40092,7 +41690,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobHierarchySegmentHeaders = { + exports2.ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", @@ -40143,7 +41741,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerListBlobHierarchySegmentExceptionHeaders = { + exports2.ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", @@ -40159,7 +41757,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccountInfoHeaders = { + exports2.ContainerGetAccountInfoHeaders = { serializedName: 
"Container_getAccountInfoHeaders", type: { name: "Composite", @@ -40231,7 +41829,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var ContainerGetAccountInfoExceptionHeaders = { + exports2.ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -40247,7 +41845,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDownloadHeaders = { + exports2.BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", @@ -40587,7 +42185,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDownloadExceptionHeaders = { + exports2.BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", @@ -40603,7 +42201,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetPropertiesHeaders = { + exports2.BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", @@ -40756,7 +42354,438 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "String" } - }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + 
name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: 
"x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } + }; + exports2.BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { etag: { serializedName: "etag", xmlName: "etag", @@ -40764,48 +42793,80 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "ByteArray" + name: "DateTimeRfc1123" } }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "String" + name: "Number" } }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "Number" + name: "DateTimeRfc1123" } }, + errorCode: { + 
serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -40834,139 +42895,227 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", type: { - name: "String" + name: "DateTimeRfc1123" } }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", type: { - name: "Number" + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", + } + } + } + }; + exports2.BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" + name: "String" + } + } + } + } + }; + exports2.BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - accessTier: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } + }; + exports2.BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: 
"x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - accessTierInferred: { - serializedName: "x-ms-access-tier-inferred", - xmlName: "x-ms-access-tier-inferred", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "Boolean" + name: "String" } }, - archiveStatus: { - serializedName: "x-ms-archive-status", - xmlName: "x-ms-archive-status", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - accessTierChangedOn: { - serializedName: "x-ms-access-tier-change-time", - xmlName: "x-ms-access-tier-change-time", + date: { + serializedName: "date", + xmlName: "date", type: { name: "DateTimeRfc1123" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } + }; + exports2.BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Boolean" + name: "DateTimeRfc1123" } }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: "x-ms-tag-count", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" + name: "String" } }, - expiresOn: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "DateTimeRfc1123" + name: "String" } }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Boolean" + name: "String" } }, - rehydratePriority: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", type: { - name: "Enum", - allowedValues: ["High", "Standard"] + name: "String" } }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", + date: { + serializedName: "date", + xmlName: "date", type: { name: "DateTimeRfc1123" } }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", type: { - name: "DateTimeRfc1123" + name: "Boolean" } }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] + name: "String" } }, - 
legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Boolean" + name: "String" } }, errorCode: { @@ -40979,11 +43128,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetPropertiesExceptionHeaders = { - serializedName: "Blob_getPropertiesExceptionHeaders", + exports2.BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", - className: "BlobGetPropertiesExceptionHeaders", + className: "BlobSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -40995,12 +43144,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteHeaders = { - serializedName: "Blob_deleteHeaders", + exports2.BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", - className: "BlobDeleteHeaders", + className: "BlobAcquireLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41028,22 +43198,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobDeleteExceptionHeaders = { - serializedName: "Blob_deleteExceptionHeaders", + exports2.BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobDeleteExceptionHeaders", + className: "BlobAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41055,12 +43218,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobUndeleteHeaders = { - serializedName: "Blob_undeleteHeaders", + exports2.BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", - className: "BlobUndeleteHeaders", + className: "BlobReleaseLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41088,22 +43265,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobUndeleteExceptionHeaders = { - serializedName: "Blob_undeleteExceptionHeaders", + exports2.BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobUndeleteExceptionHeaders", + className: "BlobReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41115,11 +43285,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetExpiryHeaders = { - 
serializedName: "Blob_setExpiryHeaders", + exports2.BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", - className: "BlobSetExpiryHeaders", + className: "BlobRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", @@ -41135,6 +43305,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41166,11 +43343,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetExpiryExceptionHeaders = { - serializedName: "Blob_setExpiryExceptionHeaders", + exports2.BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetExpiryExceptionHeaders", + className: "BlobRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41182,11 +43359,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetHttpHeadersHeaders = { - serializedName: "Blob_setHttpHeadersHeaders", + exports2.BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersHeaders", + className: "BlobChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", @@ -41202,13 +43379,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41223,6 +43393,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", @@ -41236,22 +43413,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobSetHttpHeadersExceptionHeaders = { - serializedName: "Blob_setHttpHeadersExceptionHeaders", + exports2.BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersExceptionHeaders", + className: "BlobChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41263,12 +43433,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetImmutabilityPolicyHeaders = { - serializedName: "Blob_setImmutabilityPolicyHeaders", + exports2.BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyHeaders", + className: "BlobBreakLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: 
"x-ms-client-request-id", @@ -41296,30 +43487,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } - }, - immutabilityPolicyExpiry: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } } } } }; - var BlobSetImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + exports2.BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyExceptionHeaders", + className: "BlobBreakLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41331,12 +43507,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobDeleteImmutabilityPolicyHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyHeaders", + exports2.BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyHeaders", + className: "BlobCreateSnapshotHeaders", modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41358,21 +43555,42 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobDeleteImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + exports2.BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + className: "BlobCreateSnapshotExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41384,12 +43602,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetLegalHoldHeaders = { - serializedName: "Blob_setLegalHoldHeaders", + exports2.BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldHeaders", + className: "BlobStartCopyFromURLHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41411,6 +43643,13 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -41418,21 +43657,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", type: { - name: "Boolean" + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" } } } } }; - var BlobSetLegalHoldExceptionHeaders = { - serializedName: "Blob_setLegalHoldExceptionHeaders", + exports2.BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldExceptionHeaders", + className: "BlobStartCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41444,11 +43698,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetMetadataHeaders = { - serializedName: "Blob_setMetadataHeaders", + exports2.BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", - className: "BlobSetMetadataHeaders", + className: "BlobCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", @@ -41499,20 +43753,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", type: { - name: "Boolean" + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", type: { name: "String" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", @@ -41530,11 +43799,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetMetadataExceptionHeaders = { - serializedName: "Blob_setMetadataExceptionHeaders", + exports2.BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobSetMetadataExceptionHeaders", + className: "BlobCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41546,33 +43815,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAcquireLeaseHeaders = { - serializedName: "Blob_acquireLeaseHeaders", + exports2.BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseHeaders", + className: "BlobAbortCopyFromURLHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, 
- lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41600,15 +43848,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobAcquireLeaseExceptionHeaders = { - serializedName: "Blob_acquireLeaseExceptionHeaders", + exports2.BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseExceptionHeaders", + className: "BlobAbortCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41620,26 +43875,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobReleaseLeaseHeaders = { - serializedName: "Blob_releaseLeaseHeaders", + exports2.BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseHeaders", + className: "BlobSetTierHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41661,21 +43902,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" + name: "String" } } } } }; - var BlobReleaseLeaseExceptionHeaders = { - serializedName: "Blob_releaseLeaseExceptionHeaders", + exports2.BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseExceptionHeaders", + className: "BlobSetTierExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41687,33 +43928,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobRenewLeaseHeaders = { - serializedName: "Blob_renewLeaseHeaders", + exports2.BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", - className: "BlobRenewLeaseHeaders", + className: "BlobGetAccountInfoHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41741,15 +43961,50 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: 
"x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } } } } }; - var BlobRenewLeaseExceptionHeaders = { - serializedName: "Blob_renewLeaseExceptionHeaders", + exports2.BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: "BlobRenewLeaseExceptionHeaders", + className: "BlobGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41761,12 +44016,49 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobChangeLeaseHeaders = { - serializedName: "Blob_changeLeaseHeaders", + exports2.BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", type: { name: "Composite", - className: "BlobChangeLeaseHeaders", + className: "BlobQueryHeaders", modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, etag: { serializedName: "etag", xmlName: "etag", @@ -41774,13 +44066,129 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123" } }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, 
+ copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41795,16 +44203,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", type: { name: "String" } @@ -41815,15 +44223,64 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } } } } }; - var BlobChangeLeaseExceptionHeaders = { - serializedName: "Blob_changeLeaseExceptionHeaders", + exports2.BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", - className: "BlobChangeLeaseExceptionHeaders", + className: "BlobQueryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41835,33 +44292,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobBreakLeaseHeaders = { - serializedName: "Blob_breakLeaseHeaders", + exports2.BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", type: { name: "Composite", - className: "BlobBreakLeaseHeaders", + className: "BlobGetTagsHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41889,15 +44325,22 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "DateTimeRfc1123" } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobBreakLeaseExceptionHeaders = { - serializedName: "Blob_breakLeaseExceptionHeaders", + exports2.BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", - className: "BlobBreakLeaseExceptionHeaders", + className: "BlobGetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -41909,33 +44352,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCreateSnapshotHeaders = { - serializedName: "Blob_createSnapshotHeaders", + exports2.BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", type: { name: "Composite", - className: "BlobCreateSnapshotHeaders", + className: "BlobSetTagsHeaders", modelProperties: { - snapshot: { - serializedName: "x-ms-snapshot", - xmlName: "x-ms-snapshot", - type: { - name: "String" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -41957,13 +44379,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -41971,13 +44386,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -41988,11 +44396,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCreateSnapshotExceptionHeaders = { - serializedName: "Blob_createSnapshotExceptionHeaders", + exports2.BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", - className: "BlobCreateSnapshotExceptionHeaders", + className: "BlobSetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42004,11 +44412,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobStartCopyFromURLHeaders = { - serializedName: "Blob_startCopyFromURLHeaders", + exports2.PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLHeaders", + className: "PageBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", @@ -42024,6 +44432,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42059,19 +44474,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { 
+ serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "String" } }, errorCode: { @@ -42084,11 +44505,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobStartCopyFromURLExceptionHeaders = { - serializedName: "Blob_startCopyFromURLExceptionHeaders", + exports2.PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLExceptionHeaders", + className: "PageBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42100,11 +44521,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCopyFromURLHeaders = { - serializedName: "Blob_copyFromURLHeaders", + exports2.PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", - className: "BlobCopyFromURLHeaders", + className: "PageBlobUploadPagesHeaders", modelProperties: { etag: { serializedName: "etag", @@ -42120,6 +44541,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42141,13 +44583,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -42155,35 +44590,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", type: { - name: "String" + name: "Boolean" } }, - copyStatus: { - defaultValue: "success", - isConstant: true, - serializedName: "x-ms-copy-status", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", @@ -42201,11 +44621,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobCopyFromURLExceptionHeaders = { - serializedName: "Blob_copyFromURLExceptionHeaders", + exports2.PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", - className: "BlobCopyFromURLExceptionHeaders", + className: "PageBlobUploadPagesExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42217,12 +44637,47 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAbortCopyFromURLHeaders = { - serializedName: "Blob_abortCopyFromURLHeaders", + exports2.PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLHeaders", + className: "PageBlobClearPagesHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42261,11 +44716,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobAbortCopyFromURLExceptionHeaders = { - serializedName: "Blob_abortCopyFromURLExceptionHeaders", + exports2.PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLExceptionHeaders", + className: "PageBlobClearPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42277,70 +44732,45 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTierHeaders = { - serializedName: "Blob_setTierHeaders", + exports2.PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", - className: "BlobSetTierHeaders", + className: "PageBlobUploadPagesFromURLHeaders", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "String" + name: "DateTimeRfc1123" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "String" + name: "ByteArray" } }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } - }; - var BlobSetTierExceptionHeaders = { - serializedName: "Blob_setTierExceptionHeaders", - type: { - name: "Composite", - className: "BlobSetTierExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", type: { - name: "String" + name: "ByteArray" } - } - } - } - }; - var BlobGetAccountInfoHeaders = { - serializedName: "Blob_getAccountInfoHeaders", - type: { - name: "Composite", - className: "BlobGetAccountInfoHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + }, + blobSequenceNumber: { + serializedName: 
"x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "String" + name: "Number" } }, requestId: { @@ -42364,49 +44794,42 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] + name: "Boolean" } }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] + name: "String" } }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Boolean" + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" } } } } }; - var BlobGetAccountInfoExceptionHeaders = { - serializedName: "Blob_getAccountInfoExceptionHeaders", + exports2.PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobGetAccountInfoExceptionHeaders", + className: "PageBlobUploadPagesFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42418,11 +44841,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobQueryHeaders = { - serializedName: "Blob_queryHeaders", + exports2.PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", - className: "BlobQueryHeaders", + className: "PageBlobGetPageRangesHeaders", modelProperties: { lastModified: { serializedName: "last-modified", @@ -42431,36 +44854,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - metadata: { - serializedName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", - type: { - name: "String" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -42468,127 +44861,92 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", type: { - name: "String" + name: "Number" } }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - 
contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - copyCompletionTime: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", + date: { + serializedName: "date", + xmlName: "date", type: { name: "DateTimeRfc1123" } }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + } + } + } + }; + exports2.PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", + } + } + } + }; + exports2.PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "String" + name: "DateTimeRfc1123" } }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] + name: "Number" } }, clientRequestId: { @@ -42612,13 +44970,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -42626,39 +44977,85 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + 
errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Number" + name: "String" + } + } + } + } + }; + exports2.PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + exports2.PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" } }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Boolean" + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "ByteArray" + name: "DateTimeRfc1123" } }, errorCode: { @@ -42667,22 +45064,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; type: { name: "String" } - }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } } } } }; - var BlobQueryExceptionHeaders = { - serializedName: "Blob_queryExceptionHeaders", + exports2.PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", type: { name: "Composite", - className: "BlobQueryExceptionHeaders", + className: "PageBlobResizeExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42694,12 +45084,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetTagsHeaders = { - serializedName: "Blob_getTagsHeaders", + exports2.PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", - className: "BlobGetTagsHeaders", + className: "PageBlobUpdateSequenceNumberHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42738,11 +45149,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobGetTagsExceptionHeaders = { - serializedName: 
"Blob_getTagsExceptionHeaders", + exports2.PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", - className: "BlobGetTagsExceptionHeaders", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42754,12 +45165,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTagsHeaders = { - serializedName: "Blob_setTagsHeaders", + exports2.PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", - className: "BlobSetTagsHeaders", + className: "PageBlobCopyIncrementalHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42788,6 +45213,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -42798,11 +45238,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlobSetTagsExceptionHeaders = { - serializedName: "Blob_setTagsExceptionHeaders", + exports2.PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", - className: "BlobSetTagsExceptionHeaders", + className: "PageBlobCopyIncrementalExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42814,11 +45254,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCreateHeaders = { - serializedName: "PageBlob_createHeaders", + exports2.AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", type: { name: "Composite", - className: "PageBlobCreateHeaders", + className: "AppendBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", @@ -42907,11 +45347,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCreateExceptionHeaders = { - serializedName: "PageBlob_createExceptionHeaders", + exports2.AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", - className: "PageBlobCreateExceptionHeaders", + className: "AppendBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -42923,11 +45363,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesHeaders = { - serializedName: "PageBlob_uploadPagesHeaders", + exports2.AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesHeaders", + className: "AppendBlobAppendBlockHeaders", modelProperties: { etag: { serializedName: "etag", @@ -42957,13 +45397,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - 
name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -42992,6 +45425,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", @@ -43023,11 +45470,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesExceptionHeaders = { - serializedName: "PageBlob_uploadPagesExceptionHeaders", + exports2.AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesExceptionHeaders", + className: "AppendBlobAppendBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43039,11 +45486,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobClearPagesHeaders = { - serializedName: "PageBlob_clearPagesHeaders", + exports2.AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", - className: "PageBlobClearPagesHeaders", + className: "AppendBlobAppendBlockFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", @@ -43073,20 +45520,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", @@ -43108,6 +45541,41 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43118,11 +45586,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobClearPagesExceptionHeaders = { - serializedName: "PageBlob_clearPagesExceptionHeaders", + exports2.AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", - className: "PageBlobClearPagesExceptionHeaders", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43134,11 +45602,11 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUploadPagesFromURLHeaders = { - serializedName: "PageBlob_uploadPagesFromURLHeaders", + exports2.AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLHeaders", + className: "AppendBlobSealHeaders", modelProperties: { etag: { serializedName: "etag", @@ -43154,25 +45622,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" + name: "String" } }, requestId: { @@ -43196,42 +45650,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", type: { name: "Boolean" } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var PageBlobUploadPagesFromURLExceptionHeaders = { - serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + exports2.AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLExceptionHeaders", + className: "AppendBlobSealExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43243,19 +45676,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesHeaders = { - serializedName: "PageBlob_getPageRangesHeaders", + exports2.BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesHeaders", + className: "BlockBlobUploadHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -43263,11 +45689,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Number" + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" } }, clientRequestId: { @@ -43291,6 +45724,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -43298,6 +45738,27 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43308,11 +45769,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesExceptionHeaders = { - serializedName: "PageBlob_getPageRangesExceptionHeaders", + exports2.BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesExceptionHeaders", + className: "BlockBlobUploadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43324,19 +45785,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesDiffHeaders = { - serializedName: "PageBlob_getPageRangesDiffHeaders", + exports2.BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffHeaders", + className: "BlockBlobPutBlobFromUrlHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -43344,11 +45798,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Number" + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" } }, clientRequestId: { @@ -43372,6 +45833,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -43379,6 +45847,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43389,11 +45878,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobGetPageRangesDiffExceptionHeaders = { - serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + exports2.BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffExceptionHeaders", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", modelProperties: { errorCode: { 
serializedName: "x-ms-error-code", @@ -43405,31 +45894,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobResizeHeaders = { - serializedName: "PageBlob_resizeHeaders", + exports2.BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", - className: "PageBlobResizeHeaders", + className: "BlockBlobStageBlockHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "Number" + name: "ByteArray" } }, clientRequestId: { @@ -43460,6 +45935,34 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43470,11 +45973,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobResizeExceptionHeaders = { - serializedName: "PageBlob_resizeExceptionHeaders", + exports2.BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", - className: "PageBlobResizeExceptionHeaders", + className: "BlockBlobStageBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43486,31 +45989,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUpdateSequenceNumberHeaders = { - serializedName: "PageBlob_updateSequenceNumberHeaders", + exports2.BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberHeaders", + className: "BlockBlobStageBlockFromURLHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "DateTimeRfc1123" + name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", type: { - name: "Number" + name: "ByteArray" } }, clientRequestId: { @@ -43541,6 +46037,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: 
"String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43551,11 +46068,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobUpdateSequenceNumberExceptionHeaders = { - serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + exports2.BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberExceptionHeaders", + className: "BlockBlobStageBlockFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43567,11 +46084,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCopyIncrementalHeaders = { - serializedName: "PageBlob_copyIncrementalHeaders", + exports2.BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", - className: "PageBlobCopyIncrementalHeaders", + className: "BlockBlobCommitBlockListHeaders", modelProperties: { etag: { serializedName: "etag", @@ -43587,6 +46104,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -43608,6 +46139,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -43615,19 +46153,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "String" } }, errorCode: { @@ -43640,11 +46184,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var PageBlobCopyIncrementalExceptionHeaders = { - serializedName: "PageBlob_copyIncrementalExceptionHeaders", + exports2.BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", - className: "PageBlobCopyIncrementalExceptionHeaders", + className: "BlockBlobCommitBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -43656,12 +46200,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobCreateHeaders = { - serializedName: "AppendBlob_createHeaders", + exports2.BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", - 
className: "AppendBlobCreateHeaders", + className: "BlockBlobGetBlockListHeaders", modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, etag: { serializedName: "etag", xmlName: "etag", @@ -43669,18 +46220,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "DateTimeRfc1123" + name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", type: { - name: "ByteArray" + name: "Number" } }, clientRequestId: { @@ -43704,13 +46255,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -43718,27 +46262,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, + } + } + } + }; + exports2.BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -43749,1182 +46288,931 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobCreateExceptionHeaders = { - serializedName: "AppendBlob_createExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobCreateExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js +var require_parameters = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/models/parameters.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.action3 = exports2.action2 = exports2.leaseId1 = exports2.action1 = exports2.proposedLeaseId = exports2.duration = exports2.action = exports2.comp10 = exports2.sourceLeaseId = exports2.sourceContainerName = exports2.comp9 = exports2.deletedContainerVersion = exports2.deletedContainerName = exports2.comp8 = exports2.containerAcl = exports2.comp7 = exports2.comp6 = exports2.ifUnmodifiedSince = exports2.ifModifiedSince = exports2.leaseId = exports2.preventEncryptionScopeOverride = exports2.defaultEncryptionScope = exports2.access = exports2.metadata = exports2.restype2 = exports2.where = exports2.comp5 = exports2.multipartContentType = exports2.contentLength = exports2.comp4 = exports2.body = exports2.restype1 = exports2.comp3 = exports2.keyInfo = 
exports2.include = exports2.maxPageSize = exports2.marker = exports2.prefix = exports2.comp2 = exports2.comp1 = exports2.accept1 = exports2.requestId = exports2.version = exports2.timeoutInSeconds = exports2.comp = exports2.restype = exports2.url = exports2.accept = exports2.blobServiceProperties = exports2.contentType = void 0; + exports2.fileRequestIntent = exports2.copySourceTags = exports2.copySourceAuthorization = exports2.sourceContentMD5 = exports2.xMsRequiresSync = exports2.legalHold1 = exports2.sealBlob = exports2.blobTagsString = exports2.copySource = exports2.sourceIfTags = exports2.sourceIfNoneMatch = exports2.sourceIfMatch = exports2.sourceIfUnmodifiedSince = exports2.sourceIfModifiedSince = exports2.rehydratePriority = exports2.tier = exports2.comp14 = exports2.encryptionScope = exports2.legalHold = exports2.comp13 = exports2.immutabilityPolicyMode = exports2.immutabilityPolicyExpiry = exports2.comp12 = exports2.blobContentDisposition = exports2.blobContentLanguage = exports2.blobContentEncoding = exports2.blobContentMD5 = exports2.blobContentType = exports2.blobCacheControl = exports2.expiresOn = exports2.expiryOptions = exports2.comp11 = exports2.blobDeleteType = exports2.deleteSnapshots = exports2.ifTags = exports2.ifNoneMatch = exports2.ifMatch = exports2.encryptionAlgorithm = exports2.encryptionKeySha256 = exports2.encryptionKey = exports2.rangeGetContentCRC64 = exports2.rangeGetContentMD5 = exports2.range = exports2.versionId = exports2.snapshot = exports2.delimiter = exports2.include1 = exports2.proposedLeaseId1 = exports2.action4 = exports2.breakPeriod = void 0; + exports2.listType = exports2.comp25 = exports2.blocks = exports2.blockId = exports2.comp24 = exports2.copySourceBlobProperties = exports2.blobType2 = exports2.comp23 = exports2.sourceRange1 = exports2.appendPosition = exports2.maxSize = exports2.comp22 = exports2.blobType1 = exports2.comp21 = exports2.sequenceNumberAction = exports2.prevSnapshotUrl = exports2.prevsnapshot = exports2.comp20 = exports2.range1 = exports2.sourceContentCrc64 = exports2.sourceRange = exports2.sourceUrl = exports2.pageWrite1 = exports2.ifSequenceNumberEqualTo = exports2.ifSequenceNumberLessThan = exports2.ifSequenceNumberLessThanOrEqualTo = exports2.pageWrite = exports2.comp19 = exports2.accept2 = exports2.body1 = exports2.contentType1 = exports2.blobSequenceNumber = exports2.blobContentLength = exports2.blobType = exports2.transactionalContentCrc64 = exports2.transactionalContentMD5 = exports2.tags = exports2.comp18 = exports2.comp17 = exports2.queryRequest = exports2.tier1 = exports2.comp16 = exports2.copyId = exports2.copyActionAbortConstant = exports2.comp15 = void 0; + var mappers_js_1 = require_mappers(); + exports2.contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } + }; + exports2.blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: mappers_js_1.BlobServiceProperties + }; + exports2.accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + exports2.url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true + }; + exports2.restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: 
"restype", + type: { + name: "String" + } + } + }; + exports2.comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } + }; + exports2.version = { + parameterPath: "version", + mapper: { + defaultValue: "2025-07-05", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } + }; + exports2.requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } + }; + exports2.accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + exports2.comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } + }; + exports2.marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } + }; + exports2.maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } + }; + exports2.include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: "CSV" + }; + exports2.keyInfo = { + parameterPath: "keyInfo", + mapper: mappers_js_1.KeyInfo + }; + exports2.comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } + }; + exports2.comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } + }; + exports2.multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } + }; + exports2.comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + 
serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } + }; + exports2.restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + exports2.metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + }; + exports2.access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } + }; + exports2.defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } + }; + exports2.preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } + }; + exports2.leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } + }; + exports2.ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } + }; + exports2.comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } + }; + exports2.deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } + }; + exports2.comp9 = { + parameterPath: "comp", + 
mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } + }; + exports2.sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } + }; + exports2.comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var AppendBlobAppendBlockHeaders = { - serializedName: "AppendBlob_appendBlockHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" } } }; - var AppendBlobAppendBlockExceptionHeaders = { - serializedName: "AppendBlob_appendBlockExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + 
xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" } } }; - var AppendBlobAppendBlockFromUrlHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockFromUrlHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var AppendBlobAppendBlockFromUrlExceptionHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockFromUrlExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" } } }; - var AppendBlobSealHeaders = { - serializedName: "AppendBlob_sealHeaders", - type: { - name: "Composite", - className: "AppendBlobSealHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isSealed: { - serializedName: "x-ms-blob-sealed", - 
xmlName: "x-ms-blob-sealed", - type: { - name: "Boolean" - } - } + exports2.action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var AppendBlobSealExceptionHeaders = { - serializedName: "AppendBlob_sealExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobSealExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var BlockBlobUploadHeaders = { - serializedName: "BlockBlob_uploadHeaders", - type: { - name: "Composite", - className: "BlockBlobUploadHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + exports2.breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } + }; + exports2.action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + exports2.proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } + }; + exports2.include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { type: { - name: "String" + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] } } } + }, + collectionFormat: "CSV" + }; + exports2.delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + 
xmlName: "delimiter", + type: { + name: "String" + } + } + }; + exports2.snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } + }; + exports2.versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } } }; - var BlockBlobUploadExceptionHeaders = { - serializedName: "BlockBlob_uploadExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobUploadExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" } } }; - var BlockBlobPutBlobFromUrlHeaders = { - serializedName: "BlockBlob_putBlobFromUrlHeaders", - type: { - name: "Composite", - className: "BlockBlobPutBlobFromUrlHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" } } }; - var BlockBlobPutBlobFromUrlExceptionHeaders = { - serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobPutBlobFromUrlExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" } } }; - var BlockBlobStageBlockHeaders = { - serializedName: "BlockBlob_stageBlockHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockHeaders", - modelProperties: { - contentMD5: { - serializedName: 
"content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" } } }; - var BlockBlobStageBlockExceptionHeaders = { - serializedName: "BlockBlob_stageBlockExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" } } }; - var BlockBlobStageBlockFromURLHeaders = { - serializedName: "BlockBlob_stageBlockFromURLHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockFromURLHeaders", - modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", 
"encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" } } }; - var BlockBlobStageBlockFromURLExceptionHeaders = { - serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockFromURLExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" } } }; - var BlockBlobCommitBlockListHeaders = { - serializedName: "BlockBlob_commitBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } + }; + exports2.ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } + }; + exports2.deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } + }; + exports2.blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } + }; + exports2.comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.expiryOptions = { + 
parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } + }; + exports2.expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } + }; + exports2.blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } + }; + exports2.blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" } } }; - var BlockBlobCommitBlockListExceptionHeaders = { - serializedName: "BlockBlob_commitBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" } } }; - var BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" } } }; - var BlockBlobGetBlockListExceptionHeaders = { - serializedName: "BlockBlob_getBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + exports2.blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" } } }; - var Mappers = /* @__PURE__ */ Object.freeze({ - __proto__: null, - AccessPolicy, - 
AppendBlobAppendBlockExceptionHeaders, - AppendBlobAppendBlockFromUrlExceptionHeaders, - AppendBlobAppendBlockFromUrlHeaders, - AppendBlobAppendBlockHeaders, - AppendBlobCreateExceptionHeaders, - AppendBlobCreateHeaders, - AppendBlobSealExceptionHeaders, - AppendBlobSealHeaders, - ArrowConfiguration, - ArrowField, - BlobAbortCopyFromURLExceptionHeaders, - BlobAbortCopyFromURLHeaders, - BlobAcquireLeaseExceptionHeaders, - BlobAcquireLeaseHeaders, - BlobBreakLeaseExceptionHeaders, - BlobBreakLeaseHeaders, - BlobChangeLeaseExceptionHeaders, - BlobChangeLeaseHeaders, - BlobCopyFromURLExceptionHeaders, - BlobCopyFromURLHeaders, - BlobCreateSnapshotExceptionHeaders, - BlobCreateSnapshotHeaders, - BlobDeleteExceptionHeaders, - BlobDeleteHeaders, - BlobDeleteImmutabilityPolicyExceptionHeaders, - BlobDeleteImmutabilityPolicyHeaders, - BlobDownloadExceptionHeaders, - BlobDownloadHeaders, - BlobFlatListSegment, - BlobGetAccountInfoExceptionHeaders, - BlobGetAccountInfoHeaders, - BlobGetPropertiesExceptionHeaders, - BlobGetPropertiesHeaders, - BlobGetTagsExceptionHeaders, - BlobGetTagsHeaders, - BlobHierarchyListSegment, - BlobItemInternal, - BlobName, - BlobPrefix, - BlobPropertiesInternal, - BlobQueryExceptionHeaders, - BlobQueryHeaders, - BlobReleaseLeaseExceptionHeaders, - BlobReleaseLeaseHeaders, - BlobRenewLeaseExceptionHeaders, - BlobRenewLeaseHeaders, - BlobServiceProperties, - BlobServiceStatistics, - BlobSetExpiryExceptionHeaders, - BlobSetExpiryHeaders, - BlobSetHttpHeadersExceptionHeaders, - BlobSetHttpHeadersHeaders, - BlobSetImmutabilityPolicyExceptionHeaders, - BlobSetImmutabilityPolicyHeaders, - BlobSetLegalHoldExceptionHeaders, - BlobSetLegalHoldHeaders, - BlobSetMetadataExceptionHeaders, - BlobSetMetadataHeaders, - BlobSetTagsExceptionHeaders, - BlobSetTagsHeaders, - BlobSetTierExceptionHeaders, - BlobSetTierHeaders, - BlobStartCopyFromURLExceptionHeaders, - BlobStartCopyFromURLHeaders, - BlobTag, - BlobTags, - BlobUndeleteExceptionHeaders, - BlobUndeleteHeaders, - Block, - BlockBlobCommitBlockListExceptionHeaders, - BlockBlobCommitBlockListHeaders, - BlockBlobGetBlockListExceptionHeaders, - BlockBlobGetBlockListHeaders, - BlockBlobPutBlobFromUrlExceptionHeaders, - BlockBlobPutBlobFromUrlHeaders, - BlockBlobStageBlockExceptionHeaders, - BlockBlobStageBlockFromURLExceptionHeaders, - BlockBlobStageBlockFromURLHeaders, - BlockBlobStageBlockHeaders, - BlockBlobUploadExceptionHeaders, - BlockBlobUploadHeaders, - BlockList, - BlockLookupList, - ClearRange, - ContainerAcquireLeaseExceptionHeaders, - ContainerAcquireLeaseHeaders, - ContainerBreakLeaseExceptionHeaders, - ContainerBreakLeaseHeaders, - ContainerChangeLeaseExceptionHeaders, - ContainerChangeLeaseHeaders, - ContainerCreateExceptionHeaders, - ContainerCreateHeaders, - ContainerDeleteExceptionHeaders, - ContainerDeleteHeaders, - ContainerFilterBlobsExceptionHeaders, - ContainerFilterBlobsHeaders, - ContainerGetAccessPolicyExceptionHeaders, - ContainerGetAccessPolicyHeaders, - ContainerGetAccountInfoExceptionHeaders, - ContainerGetAccountInfoHeaders, - ContainerGetPropertiesExceptionHeaders, - ContainerGetPropertiesHeaders, - ContainerItem, - ContainerListBlobFlatSegmentExceptionHeaders, - ContainerListBlobFlatSegmentHeaders, - ContainerListBlobHierarchySegmentExceptionHeaders, - ContainerListBlobHierarchySegmentHeaders, - ContainerProperties, - ContainerReleaseLeaseExceptionHeaders, - ContainerReleaseLeaseHeaders, - ContainerRenameExceptionHeaders, - ContainerRenameHeaders, - ContainerRenewLeaseExceptionHeaders, - 
ContainerRenewLeaseHeaders, - ContainerRestoreExceptionHeaders, - ContainerRestoreHeaders, - ContainerSetAccessPolicyExceptionHeaders, - ContainerSetAccessPolicyHeaders, - ContainerSetMetadataExceptionHeaders, - ContainerSetMetadataHeaders, - ContainerSubmitBatchExceptionHeaders, - ContainerSubmitBatchHeaders, - CorsRule, - DelimitedTextConfiguration, - FilterBlobItem, - FilterBlobSegment, - GeoReplication, - JsonTextConfiguration, - KeyInfo, - ListBlobsFlatSegmentResponse, - ListBlobsHierarchySegmentResponse, - ListContainersSegmentResponse, - Logging, - Metrics, - PageBlobClearPagesExceptionHeaders, - PageBlobClearPagesHeaders, - PageBlobCopyIncrementalExceptionHeaders, - PageBlobCopyIncrementalHeaders, - PageBlobCreateExceptionHeaders, - PageBlobCreateHeaders, - PageBlobGetPageRangesDiffExceptionHeaders, - PageBlobGetPageRangesDiffHeaders, - PageBlobGetPageRangesExceptionHeaders, - PageBlobGetPageRangesHeaders, - PageBlobResizeExceptionHeaders, - PageBlobResizeHeaders, - PageBlobUpdateSequenceNumberExceptionHeaders, - PageBlobUpdateSequenceNumberHeaders, - PageBlobUploadPagesExceptionHeaders, - PageBlobUploadPagesFromURLExceptionHeaders, - PageBlobUploadPagesFromURLHeaders, - PageBlobUploadPagesHeaders, - PageList, - PageRange, - QueryFormat, - QueryRequest, - QuerySerialization, - RetentionPolicy, - ServiceFilterBlobsExceptionHeaders, - ServiceFilterBlobsHeaders, - ServiceGetAccountInfoExceptionHeaders, - ServiceGetAccountInfoHeaders, - ServiceGetPropertiesExceptionHeaders, - ServiceGetPropertiesHeaders, - ServiceGetStatisticsExceptionHeaders, - ServiceGetStatisticsHeaders, - ServiceGetUserDelegationKeyExceptionHeaders, - ServiceGetUserDelegationKeyHeaders, - ServiceListContainersSegmentExceptionHeaders, - ServiceListContainersSegmentHeaders, - ServiceSetPropertiesExceptionHeaders, - ServiceSetPropertiesHeaders, - ServiceSubmitBatchExceptionHeaders, - ServiceSubmitBatchHeaders, - SignedIdentifier, - StaticWebsite, - StorageError, - UserDelegationKey - }); - var contentType = { - parameterPath: ["options", "contentType"], + exports2.blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], mapper: { - defaultValue: "application/xml", + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } + }; + exports2.comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", isConstant: true, - serializedName: "Content-Type", + serializedName: "comp", type: { name: "String" } } }; - var blobServiceProperties = { - parameterPath: "blobServiceProperties", - mapper: BlobServiceProperties + exports2.immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } }; - var accept = { - parameterPath: "accept", + exports2.immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], mapper: { - defaultValue: "application/xml", + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + }; + exports2.comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", isConstant: true, - serializedName: "Accept", + serializedName: "comp", type: { name: "String" } } }; - var url = { - parameterPath: "url", + exports2.legalHold = { + 
parameterPath: "legalHold", mapper: { - serializedName: "url", + serializedName: "x-ms-legal-hold", required: true, - xmlName: "url", + xmlName: "x-ms-legal-hold", type: { - name: "String" + name: "Boolean" } - }, - skipEncoding: true + } }; - var restype = { - parameterPath: "restype", + exports2.encryptionScope = { + parameterPath: ["options", "encryptionScope"], mapper: { - defaultValue: "service", - isConstant: true, - serializedName: "restype", + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { name: "String" } } }; - var comp = { + exports2.comp14 = { parameterPath: "comp", mapper: { - defaultValue: "properties", + defaultValue: "snapshot", isConstant: true, serializedName: "comp", type: { @@ -44932,132 +47220,203 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var timeoutInSeconds = { - parameterPath: ["options", "timeoutInSeconds"], + exports2.tier = { + parameterPath: ["options", "tier"], mapper: { - constraints: { - InclusiveMinimum: 0 - }, - serializedName: "timeout", - xmlName: "timeout", + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", type: { - name: "Number" + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] } } }; - var version = { - parameterPath: "version", + exports2.rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], mapper: { - defaultValue: "2025-05-05", - isConstant: true, - serializedName: "x-ms-version", + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", type: { - name: "String" + name: "Enum", + allowedValues: ["High", "Standard"] } } }; - var requestId = { - parameterPath: ["options", "requestId"], + exports2.sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], mapper: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } + }; + exports2.sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", type: { name: "String" } } }; - var accept1 = { - parameterPath: "accept", + exports2.sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", type: { name: "String" } } }; - var comp1 = { - parameterPath: "comp", + exports2.sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], mapper: { - defaultValue: "stats", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", type: { name: "String" } } }; - var comp2 = { - parameterPath: "comp", + exports2.copySource = { + parameterPath: "copySource", mapper: { - defaultValue: "list", 
- isConstant: true, - serializedName: "comp", + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", type: { name: "String" } } }; - var prefix = { - parameterPath: ["options", "prefix"], + exports2.blobTagsString = { + parameterPath: ["options", "blobTagsString"], mapper: { - serializedName: "prefix", - xmlName: "prefix", + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", type: { name: "String" } } }; - var marker = { - parameterPath: ["options", "marker"], + exports2.sealBlob = { + parameterPath: ["options", "sealBlob"], mapper: { - serializedName: "marker", - xmlName: "marker", + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } + }; + exports2.legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + }; + exports2.xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", type: { name: "String" } } }; - var maxPageSize = { - parameterPath: ["options", "maxPageSize"], + exports2.sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], mapper: { - constraints: { - InclusiveMinimum: 1 - }, - serializedName: "maxresults", - xmlName: "maxresults", + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", type: { - name: "Number" + name: "ByteArray" } } }; - var include = { - parameterPath: ["options", "include"], + exports2.copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListContainersIncludeType", + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } + name: "String" } - }, - collectionFormat: "CSV" + } }; - var keyInfo = { - parameterPath: "keyInfo", - mapper: KeyInfo + exports2.copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } + }; + exports2.fileRequestIntent = { + parameterPath: ["options", "fileRequestIntent"], + mapper: { + serializedName: "x-ms-file-request-intent", + xmlName: "x-ms-file-request-intent", + type: { + name: "String" + } + } }; - var comp3 = { + exports2.comp15 = { parameterPath: "comp", mapper: { - defaultValue: "userdelegationkey", + defaultValue: "copy", isConstant: true, serializedName: "comp", type: { @@ -45065,32 +47424,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var restype1 = { - parameterPath: "restype", + exports2.copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", mapper: { - defaultValue: "account", + defaultValue: "abort", isConstant: true, - serializedName: "restype", + serializedName: "x-ms-copy-action", type: { name: "String" } } }; - var body = { - parameterPath: "body", + exports2.copyId = { + parameterPath: "copyId", mapper: { - serializedName: "body", + serializedName: "copyid", required: true, - xmlName: "body", + xmlName: "copyid", type: { - name: "Stream" + name: "String" } } }; - var comp4 = { + exports2.comp16 = { parameterPath: "comp", mapper: { - defaultValue: "batch", + defaultValue: "tier", 
isConstant: true, serializedName: "comp", type: { @@ -45098,32 +47457,53 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var contentLength = { - parameterPath: "contentLength", + exports2.tier1 = { + parameterPath: "tier", mapper: { - serializedName: "Content-Length", + serializedName: "x-ms-access-tier", required: true, - xmlName: "Content-Length", + xmlName: "x-ms-access-tier", type: { - name: "Number" + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] } } }; - var multipartContentType = { - parameterPath: "multipartContentType", + exports2.queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: mappers_js_1.QueryRequest + }; + exports2.comp17 = { + parameterPath: "comp", mapper: { - serializedName: "Content-Type", - required: true, - xmlName: "Content-Type", + defaultValue: "query", + isConstant: true, + serializedName: "comp", type: { name: "String" } } }; - var comp5 = { + exports2.comp18 = { parameterPath: "comp", mapper: { - defaultValue: "blobs", + defaultValue: "tags", isConstant: true, serializedName: "comp", type: { @@ -45131,152 +47511,218 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var where = { - parameterPath: ["options", "where"], + exports2.tags = { + parameterPath: ["options", "tags"], + mapper: mappers_js_1.BlobTags + }; + exports2.transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], mapper: { - serializedName: "where", - xmlName: "where", + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } + }; + exports2.transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + }; + exports2.blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { name: "String" } } }; - var restype2 = { - parameterPath: "restype", + exports2.blobContentLength = { + parameterPath: "blobContentLength", mapper: { - defaultValue: "container", + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } + }; + exports2.blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } + }; + exports2.contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", isConstant: true, - serializedName: "restype", + serializedName: "Content-Type", type: { name: "String" } } }; - var metadata = { - parameterPath: ["options", "metadata"], + exports2.body1 = { + parameterPath: "body", mapper: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - headerCollectionPrefix: "x-ms-meta-", + serializedName: "body", + required: true, + xmlName: "body", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Stream" } } }; - var access = { - parameterPath: ["options", "access"], + exports2.accept2 = { + parameterPath: "accept", mapper: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", type: { - name: 
"Enum", - allowedValues: ["container", "blob"] + name: "String" + } + } + }; + exports2.comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + exports2.pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" } } }; - var defaultEncryptionScope = { + exports2.ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", - "containerEncryptionScope", - "defaultEncryptionScope" + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" ], mapper: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } + }; + exports2.ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", type: { - name: "String" + name: "Number" } } }; - var preventEncryptionScopeOverride = { + exports2.ifSequenceNumberEqualTo = { parameterPath: [ "options", - "containerEncryptionScope", - "preventEncryptionScopeOverride" + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" ], mapper: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", type: { - name: "Boolean" + name: "Number" } } }; - var leaseId = { - parameterPath: ["options", "leaseAccessConditions", "leaseId"], + exports2.pageWrite1 = { + parameterPath: "pageWrite", mapper: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", type: { name: "String" } } }; - var ifModifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], - mapper: { - serializedName: "If-Modified-Since", - xmlName: "If-Modified-Since", - type: { - name: "DateTimeRfc1123" - } - } - }; - var ifUnmodifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + exports2.sourceUrl = { + parameterPath: "sourceUrl", mapper: { - serializedName: "If-Unmodified-Since", - xmlName: "If-Unmodified-Since", + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", type: { - name: "DateTimeRfc1123" + name: "String" } } }; - var comp6 = { - parameterPath: "comp", + exports2.sourceRange = { + parameterPath: "sourceRange", mapper: { - defaultValue: "metadata", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", type: { name: "String" } } }; - var comp7 = { - parameterPath: "comp", + exports2.sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], mapper: { - defaultValue: "acl", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", type: { - name: "String" + name: "ByteArray" } } }; - var containerAcl = { - parameterPath: ["options", "containerAcl"], + exports2.range1 = { + parameterPath: "range", mapper: { - serializedName: "containerAcl", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier", + 
serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SignedIdentifier" - } - } + name: "String" } } }; - var comp8 = { + exports2.comp20 = { parameterPath: "comp", mapper: { - defaultValue: "undelete", + defaultValue: "pagelist", isConstant: true, serializedName: "comp", type: { @@ -45284,62 +47730,64 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var deletedContainerName = { - parameterPath: ["options", "deletedContainerName"], + exports2.prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], mapper: { - serializedName: "x-ms-deleted-container-name", - xmlName: "x-ms-deleted-container-name", + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", type: { name: "String" } } }; - var deletedContainerVersion = { - parameterPath: ["options", "deletedContainerVersion"], + exports2.prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], mapper: { - serializedName: "x-ms-deleted-container-version", - xmlName: "x-ms-deleted-container-version", + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", type: { name: "String" } } }; - var comp9 = { - parameterPath: "comp", + exports2.sequenceNumberAction = { + parameterPath: "sequenceNumberAction", mapper: { - defaultValue: "rename", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", type: { - name: "String" + name: "Enum", + allowedValues: ["max", "update", "increment"] } } }; - var sourceContainerName = { - parameterPath: "sourceContainerName", + exports2.comp21 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-source-container-name", - required: true, - xmlName: "x-ms-source-container-name", + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", type: { name: "String" } } }; - var sourceLeaseId = { - parameterPath: ["options", "sourceLeaseId"], + exports2.blobType1 = { + parameterPath: "blobType", mapper: { - serializedName: "x-ms-source-lease-id", - xmlName: "x-ms-source-lease-id", + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { name: "String" } } }; - var comp10 = { + exports2.comp22 = { parameterPath: "comp", mapper: { - defaultValue: "lease", + defaultValue: "appendblock", isConstant: true, serializedName: "comp", type: { @@ -45347,5444 +47795,6258 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var action = { - parameterPath: "action", + exports2.maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], mapper: { - defaultValue: "acquire", - isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", type: { - name: "String" + name: "Number" } } }; - var duration = { - parameterPath: ["options", "duration"], + exports2.appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], mapper: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", type: { name: "Number" } } }; - var proposedLeaseId = { - parameterPath: ["options", "proposedLeaseId"], + exports2.sourceRange1 = { + parameterPath: ["options", "sourceRange"], mapper: { - serializedName: "x-ms-proposed-lease-id", - xmlName: 
"x-ms-proposed-lease-id", + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", type: { name: "String" } } }; - var action1 = { - parameterPath: "action", + exports2.comp23 = { + parameterPath: "comp", mapper: { - defaultValue: "release", + defaultValue: "seal", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var leaseId1 = { - parameterPath: "leaseId", + exports2.blobType2 = { + parameterPath: "blobType", mapper: { - serializedName: "x-ms-lease-id", - required: true, - xmlName: "x-ms-lease-id", + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { name: "String" } } }; - var action2 = { - parameterPath: "action", + exports2.copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], mapper: { - defaultValue: "renew", - isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", type: { - name: "String" + name: "Boolean" } } }; - var action3 = { - parameterPath: "action", + exports2.comp24 = { + parameterPath: "comp", mapper: { - defaultValue: "break", + defaultValue: "block", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var breakPeriod = { - parameterPath: ["options", "breakPeriod"], + exports2.blockId = { + parameterPath: "blockId", mapper: { - serializedName: "x-ms-lease-break-period", - xmlName: "x-ms-lease-break-period", + serializedName: "blockid", + required: true, + xmlName: "blockid", type: { - name: "Number" + name: "String" } } }; - var action4 = { - parameterPath: "action", + exports2.blocks = { + parameterPath: "blocks", + mapper: mappers_js_1.BlockLookupList + }; + exports2.comp25 = { + parameterPath: "comp", mapper: { - defaultValue: "change", + defaultValue: "blocklist", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var proposedLeaseId1 = { - parameterPath: "proposedLeaseId", + exports2.listType = { + parameterPath: "listType", mapper: { - serializedName: "x-ms-proposed-lease-id", + defaultValue: "committed", + serializedName: "blocklisttype", required: true, - xmlName: "x-ms-proposed-lease-id", + xmlName: "blocklisttype", type: { - name: "String" + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] } } }; - var include1 = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListBlobsIncludeItem", - type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: [ - "copy", - "deleted", - "metadata", - "snapshots", - "uncommittedblobs", - "versions", - "tags", - "immutabilitypolicy", - "legalhold", - "deletedwithversions" - ] - } - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js +var require_service = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ServiceImpl = class { + client; + /** + * 
Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + }; + exports2.ServiceImpl = ServiceImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders } }, - collectionFormat: "CSV" + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var delimiter = { - parameterPath: "delimiter", - mapper: { - serializedName: "delimiter", - required: true, - xmlName: "delimiter", - type: { - name: "String" + var getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders } - } + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var snapshot = { - parameterPath: ["options", "snapshot"], - mapper: { - serializedName: "snapshot", - xmlName: "snapshot", - type: { - name: "String" + var getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders } - } + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var versionId = { - parameterPath: ["options", "versionId"], - mapper: { - serializedName: "versionid", - xmlName: "versionid", - type: { - name: "String" + var listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var range = { - parameterPath: ["options", "range"], - mapper: { - serializedName: "x-ms-range", - xmlName: "x-ms-range", - type: { - name: "String" + var getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 
200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders } - } + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var rangeGetContentMD5 = { - parameterPath: ["options", "rangeGetContentMD5"], - mapper: { - serializedName: "x-ms-range-get-content-md5", - xmlName: "x-ms-range-get-content-md5", - type: { - name: "Boolean" + var getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + var submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer + }; + var filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js +var require_container = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/container.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var ContainerImpl = class { + client; + /** + * Initialize a new instance of the class Container class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } - }; - var rangeGetContentCRC64 = { - parameterPath: ["options", "rangeGetContentCRC64"], - mapper: { - serializedName: "x-ms-range-get-content-crc64", - xmlName: "x-ms-range-get-content-crc64", - type: { - name: "Boolean" - } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); } - }; - var encryptionKey = { - parameterPath: ["options", "cpkInfo", "encryptionKey"], - mapper: { - serializedName: "x-ms-encryption-key", - xmlName: "x-ms-encryption-key", - type: { - name: "String" - } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); } - }; - var encryptionKeySha256 = { - parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], - mapper: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); } - }; - var encryptionAlgorithm = { - parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], - mapper: { - serializedName: "x-ms-encryption-algorithm", - xmlName: "x-ms-encryption-algorithm", - type: { - name: "String" - } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. 
The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } - }; - var ifMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], - mapper: { - serializedName: "If-Match", - xmlName: "If-Match", - type: { - name: "String" - } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } - }; - var ifNoneMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], - mapper: { - serializedName: "If-None-Match", - xmlName: "If-None-Match", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } - }; - var ifTags = { - parameterPath: ["options", "modifiedAccessConditions", "ifTags"], - mapper: { - serializedName: "x-ms-if-tags", - xmlName: "x-ms-if-tags", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } - }; - var deleteSnapshots = { - parameterPath: ["options", "deleteSnapshots"], - mapper: { - serializedName: "x-ms-delete-snapshots", - xmlName: "x-ms-delete-snapshots", - type: { - name: "Enum", - allowedValues: ["include", "only"] - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } - }; - var blobDeleteType = { - parameterPath: ["options", "blobDeleteType"], - mapper: { - serializedName: "deletetype", - xmlName: "deletetype", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } - }; - var comp11 = { - parameterPath: "comp", - mapper: { - defaultValue: "expiry", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. 
+ * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } - }; - var expiryOptions = { - parameterPath: "expiryOptions", - mapper: { - serializedName: "x-ms-expiry-option", - required: true, - xmlName: "x-ms-expiry-option", - type: { - name: "String" - } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } - }; - var expiresOn = { - parameterPath: ["options", "expiresOn"], - mapper: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", - type: { - name: "String" - } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } }; - var blobCacheControl = { - parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], - mapper: { - serializedName: "x-ms-blob-cache-control", - xmlName: "x-ms-blob-cache-control", - type: { - name: "String" + exports2.ContainerImpl = ContainerImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentType = { - parameterPath: ["options", "blobHttpHeaders", "blobContentType"], - mapper: { - serializedName: "x-ms-blob-content-type", - xmlName: "x-ms-blob-content-type", - type: { - name: "String" + var getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentMD5 = { - parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], - mapper: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" + var deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentEncoding = { - parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], - mapper: { - serializedName: "x-ms-blob-content-encoding", - xmlName: "x-ms-blob-content-encoding", - type: { - name: "String" + var setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + 
serializer: xmlSerializer }; - var blobContentLanguage = { - parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], - mapper: { - serializedName: "x-ms-blob-content-language", - xmlName: "x-ms-blob-content-language", - type: { - name: "String" + var getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer }; - var blobContentDisposition = { - parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], - mapper: { - serializedName: "x-ms-blob-content-disposition", - xmlName: "x-ms-blob-content-disposition", - type: { - name: "String" + var setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders } - } + }, + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var comp12 = { - parameterPath: "comp", - mapper: { - defaultValue: "immutabilityPolicies", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer }; - var immutabilityPolicyExpiry = { - parameterPath: ["options", "immutabilityPolicyExpiry"], - mapper: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" + var renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + 
Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer }; - var immutabilityPolicyMode = { - parameterPath: ["options", "immutabilityPolicyMode"], - mapper: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] + var submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders } - } + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var comp13 = { - parameterPath: "comp", - mapper: { - defaultValue: "legalhold", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var legalHold = { - parameterPath: "legalHold", - mapper: { - serializedName: "x-ms-legal-hold", - required: true, - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" + var acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer }; - var encryptionScope = { - parameterPath: ["options", "encryptionScope"], - mapper: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" + var releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders } - } + }, + 
queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp14 = { - parameterPath: "comp", - mapper: { - defaultValue: "snapshot", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer }; - var tier = { - parameterPath: ["options", "tier"], - mapper: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] + var breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer }; - var rehydratePriority = { - parameterPath: ["options", "rehydratePriority"], - mapper: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] + var changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders } - } - }; - var sourceIfModifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 ], - mapper: { - serializedName: "x-ms-source-if-modified-since", - xmlName: "x-ms-source-if-modified-since", - type: { - name: "DateTimeRfc1123" - } - } - }; - var sourceIfUnmodifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 ], - mapper: { - serializedName: 
"x-ms-source-if-unmodified-since", - xmlName: "x-ms-source-if-unmodified-since", - type: { - name: "DateTimeRfc1123" - } - } + isXML: true, + serializer: xmlSerializer }; - var sourceIfMatch = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], - mapper: { - serializedName: "x-ms-source-if-match", - xmlName: "x-ms-source-if-match", - type: { - name: "String" + var listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders } - } - }; - var sourceIfNoneMatch = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfNoneMatch" + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 ], - mapper: { - serializedName: "x-ms-source-if-none-match", - xmlName: "x-ms-source-if-none-match", - type: { - name: "String" - } - } + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var sourceIfTags = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], - mapper: { - serializedName: "x-ms-source-if-tags", - xmlName: "x-ms-source-if-tags", - type: { - name: "String" + var listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var copySource = { - parameterPath: "copySource", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" + var getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var blobTagsString = { - parameterPath: ["options", "blobTagsString"], - mapper: { - serializedName: "x-ms-tags", - xmlName: "x-ms-tags", - type: { - name: "String" - } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js +var require_blob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blob.js"(exports2) { + "use strict"; + 
Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlobImpl = class { + client; + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var sealBlob = { - parameterPath: ["options", "sealBlob"], - mapper: { - serializedName: "x-ms-seal-blob", - xmlName: "x-ms-seal-blob", - type: { - name: "Boolean" - } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } - }; - var legalHold1 = { - parameterPath: ["options", "legalHold"], - mapper: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } - }; - var xMsRequiresSync = { - parameterPath: "xMsRequiresSync", - mapper: { - defaultValue: "true", - isConstant: true, - serializedName: "x-ms-requires-sync", - type: { - name: "String" - } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } - }; - var sourceContentMD5 = { - parameterPath: ["options", "sourceContentMD5"], - mapper: { - serializedName: "x-ms-source-content-md5", - xmlName: "x-ms-source-content-md5", - type: { - name: "ByteArray" - } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. 
+ */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } - }; - var copySourceAuthorization = { - parameterPath: ["options", "copySourceAuthorization"], - mapper: { - serializedName: "x-ms-copy-source-authorization", - xmlName: "x-ms-copy-source-authorization", - type: { - name: "String" - } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } - }; - var copySourceTags = { - parameterPath: ["options", "copySourceTags"], - mapper: { - serializedName: "x-ms-copy-source-tag-option", - xmlName: "x-ms-copy-source-tag-option", - type: { - name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } - }; - var comp15 = { - parameterPath: "comp", - mapper: { - defaultValue: "copy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } - }; - var copyActionAbortConstant = { - parameterPath: "copyActionAbortConstant", - mapper: { - defaultValue: "abort", - isConstant: true, - serializedName: "x-ms-copy-action", - type: { - name: "String" - } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } - }; - var copyId = { - parameterPath: "copyId", - mapper: { - serializedName: "copyid", - required: true, - xmlName: "copyid", - type: { - name: "String" - } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } - }; - var comp16 = { - parameterPath: "comp", - mapper: { - defaultValue: "tier", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } - }; - var tier1 = { - parameterPath: "tier", - mapper: { - serializedName: "x-ms-access-tier", - required: true, - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. 
+ */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } - }; - var queryRequest = { - parameterPath: ["options", "queryRequest"], - mapper: QueryRequest - }; - var comp17 = { - parameterPath: "comp", - mapper: { - defaultValue: "query", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } - }; - var comp18 = { - parameterPath: "comp", - mapper: { - defaultValue: "tags", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } - }; - var tags = { - parameterPath: ["options", "tags"], - mapper: BlobTags - }; - var transactionalContentMD5 = { - parameterPath: ["options", "transactionalContentMD5"], - mapper: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", - type: { - name: "ByteArray" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } - }; - var transactionalContentCrc64 = { - parameterPath: ["options", "transactionalContentCrc64"], - mapper: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } - }; - var blobType = { - parameterPath: "blobType", - mapper: { - defaultValue: "PageBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" - } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } - }; - var blobContentLength = { - parameterPath: "blobContentLength", - mapper: { - serializedName: "x-ms-blob-content-length", - required: true, - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } - }; - var blobSequenceNumber = { - parameterPath: ["options", "blobSequenceNumber"], - mapper: { - defaultValue: 0, - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } - }; - var contentType1 = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/octet-stream", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" - } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } - }; - var body1 = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" - } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. 
+ */ + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } - }; - var accept2 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" - } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. + */ + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } }; - var comp19 = { - parameterPath: "comp", - mapper: { - defaultValue: "page", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + exports2.BlobImpl = BlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var pageWrite = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "update", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + var getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders } - } - }; - var ifSequenceNumberLessThanOrEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId ], - mapper: { - serializedName: "x-ms-if-sequence-number-le", - xmlName: "x-ms-if-sequence-number-le", - type: { - name: "Number" - } - } - }; - var ifSequenceNumberLessThan = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags ], - mapper: { - serializedName: "x-ms-if-sequence-number-lt", - xmlName: "x-ms-if-sequence-number-lt", - type: { - name: "Number" - } - } + isXML: true, + serializer: xmlSerializer }; - var ifSequenceNumberEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - 
"ifSequenceNumberEqualTo" - ], - mapper: { - serializedName: "x-ms-if-sequence-number-eq", - xmlName: "x-ms-if-sequence-number-eq", - type: { - name: "Number" + var deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer }; - var pageWrite1 = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "clear", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + var undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var sourceUrl = { - parameterPath: "sourceUrl", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" + var setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer }; - var sourceRange = { - parameterPath: "sourceRange", - mapper: { - serializedName: "x-ms-source-range", - required: true, - xmlName: "x-ms-source-range", - type: { - name: "String" + var setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders } - } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer }; - var sourceContentCrc64 = { - parameterPath: ["options", "sourceContentCrc64"], - mapper: { - serializedName: 
"x-ms-source-content-crc64", - xmlName: "x-ms-source-content-crc64", - type: { - name: "ByteArray" + var setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer }; - var range1 = { - parameterPath: "range", - mapper: { - serializedName: "x-ms-range", - required: true, - xmlName: "x-ms-range", - type: { - name: "String" + var deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp12 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp20 = { - parameterPath: "comp", - mapper: { - defaultValue: "pagelist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp13 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer }; - var prevsnapshot = { - parameterPath: ["options", "prevsnapshot"], - mapper: { - serializedName: "prevsnapshot", - xmlName: "prevsnapshot", - type: { - name: "String" + var setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer }; - var prevSnapshotUrl = { - parameterPath: ["options", "prevSnapshotUrl"], - mapper: { - serializedName: "x-ms-previous-snapshot-url", - xmlName: "x-ms-previous-snapshot-url", - type: { 
- name: "String" + var acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var sequenceNumberAction = { - parameterPath: "sequenceNumberAction", - mapper: { - serializedName: "x-ms-sequence-number-action", - required: true, - xmlName: "x-ms-sequence-number-action", - type: { - name: "Enum", - allowedValues: ["max", "update", "increment"] + var releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var comp21 = { - parameterPath: "comp", - mapper: { - defaultValue: "incrementalcopy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var blobType1 = { - parameterPath: "blobType", - mapper: { - defaultValue: "AppendBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + var changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var comp22 = { - parameterPath: "comp", - mapper: 
{ - defaultValue: "appendblock", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var maxSize = { - parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], - mapper: { - serializedName: "x-ms-blob-condition-maxsize", - xmlName: "x-ms-blob-condition-maxsize", - type: { - name: "Number" + var createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders } - } - }; - var appendPosition = { - parameterPath: [ - "options", - "appendPositionAccessConditions", - "appendPosition" + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope ], - mapper: { - serializedName: "x-ms-blob-condition-appendpos", - xmlName: "x-ms-blob-condition-appendpos", - type: { - name: "Number" - } - } + isXML: true, + serializer: xmlSerializer }; - var sourceRange1 = { - parameterPath: ["options", "sourceRange"], - mapper: { - serializedName: "x-ms-source-range", - xmlName: "x-ms-source-range", - type: { - name: "String" + var startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp23 = { - parameterPath: "comp", - mapper: { - defaultValue: "seal", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + 
var copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders } - } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent + ], + isXML: true, + serializer: xmlSerializer }; - var blobType2 = { - parameterPath: "blobType", - mapper: { - defaultValue: "BlockBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + var abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer }; - var copySourceBlobProperties = { - parameterPath: ["options", "copySourceBlobProperties"], - mapper: { - serializedName: "x-ms-copy-source-blob-properties", - xmlName: "x-ms-copy-source-blob-properties", - type: { - name: "Boolean" + var setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, + Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer }; - var comp24 = { - parameterPath: "comp", - mapper: { - defaultValue: "block", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [ + Parameters.comp, + Parameters.timeoutInSeconds, + Parameters.restype1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + 
Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer }; - var blockId = { - parameterPath: "blockId", - mapper: { - serializedName: "blockid", - required: true, - xmlName: "blockid", - type: { - name: "String" + var queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders } - } - }; - var blocks = { - parameterPath: "blocks", - mapper: BlockLookupList + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var comp25 = { - parameterPath: "comp", - mapper: { - defaultValue: "blocklist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders } - } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer }; - var listType = { - parameterPath: "listType", - mapper: { - defaultValue: "committed", - serializedName: "blocklisttype", - required: true, - xmlName: "blocklisttype", - type: { - name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] + var setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders } - } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var ServiceImpl = class { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js +var require_pageBlob = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/pageBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var PageBlobImpl = class { + client; /** - * Initialize a new instance of the class Service class. + * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** - * Sets properties for a storage account's Blob service endpoint, including properties for Storage - * Analytics and CORS (Cross-Origin Resource Sharing) rules - * @param blobServiceProperties The StorageService properties. + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ - setProperties(blobServiceProperties2, options) { - return this.client.sendOperationRequest({ blobServiceProperties: blobServiceProperties2, options }, setPropertiesOperationSpec); + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec); } /** - * gets the properties of a storage account's Blob service, including properties for Storage Analytics - * and CORS (Cross-Origin Resource Sharing) rules. + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data * @param options The options parameters. */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** - * Retrieves statistics related to replication for the Blob service. It is only available on the - * secondary location endpoint when read-access geo-redundant replication is enabled for the storage - * account. + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. * @param options The options parameters. */ - getStatistics(options) { - return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** - * The List Containers Segment operation returns a list of the containers under the specified account + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. 
* @param options The options parameters. */ - listContainersSegment(options) { - return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * @param keyInfo Key information + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob * @param options The options parameters. */ - getUserDelegationKey(keyInfo2, options) { - return this.client.sendOperationRequest({ keyInfo: keyInfo2, options }, getUserDelegationKeyOperationSpec); + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** - * Returns the sku name and account kind + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. * @param options The options parameters. */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ - submitBatch(contentLength2, multipartContentType2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, multipartContentType: multipartContentType2, body: body2, options }, submitBatchOperationSpec$1); + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a - * given search expression. Filter blobs searches across all containers within a storage account but - * can be scoped within the expression to a single container. + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number * @param options The options parameters. */ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. 
+ * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } }; - var xmlSerializer$5 = coreClient__namespace.createSerializer( + exports2.PageBlobImpl = PageBlobImpl; + var xmlSerializer = coreClient.createSerializer( Mappers, /* isXml */ true ); - var setPropertiesOperationSpec = { - path: "/", + var createOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 202: { - headersMapper: ServiceSetPropertiesHeaders + 201: { + headersMapper: Mappers.PageBlobCreateHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders } }, - requestBody: blobServiceProperties, - queryParameters: [ - restype, - comp, - timeoutInSeconds + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber ], - urlParameters: [url], + isXML: true, + serializer: xmlSerializer + }; + var uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], headerParameters: [ - contentType, - accept, - version, - requestId + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + 
Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo ], isXML: true, contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 + mediaType: "binary", + serializer: xmlSerializer }; - var getPropertiesOperationSpec$2 = { - path: "/", - httpMethod: "GET", + var clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceGetPropertiesExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders } }, - queryParameters: [ - restype, - comp, - timeoutInSeconds - ], - urlParameters: [url], + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1 + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var getStatisticsOperationSpec = { - path: "/", - httpMethod: "GET", + var uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders } }, - queryParameters: [ - restype, - timeoutInSeconds, - comp1 - ], - urlParameters: [url], + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1 + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var listContainersSegmentOperationSpec = { - path: "/", + var getPageRangesOperationSpec = { + 
path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceListContainersSegmentExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders } }, queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - include + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 ], - urlParameters: [url], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1 + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var getUserDelegationKeyOperationSpec = { - path: "/", - httpMethod: "POST", + var getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", responses: { 200: { - bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders } }, - requestBody: keyInfo, queryParameters: [ - restype, - timeoutInSeconds, - comp3 + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot ], - urlParameters: [url], + urlParameters: [Parameters.url], headerParameters: [ - contentType, - accept, - version, - requestId + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var getAccountInfoOperationSpec$2 = { - path: "/", - httpMethod: "GET", + var resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { 200: { - headersMapper: ServiceGetAccountInfoHeaders + headersMapper: Mappers.PageBlobResizeHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders } }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1 + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + 
Parameters.blobContentLength ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var submitBatchOperationSpec$1 = { - path: "/", - httpMethod: "POST", + var updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ServiceSubmitBatchHeaders + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders } }, - requestBody: body, - queryParameters: [timeoutInSeconds, comp4], - urlParameters: [url], + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], headerParameters: [ - accept, - version, - requestId, - contentLength, - multipartContentType + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var filterBlobsOperationSpec$1 = { - path: "/", - httpMethod: "GET", + var copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where - ], - urlParameters: [url], + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1 + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; - var ContainerImpl = class { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js +var require_appendBlob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AppendBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var AppendBlobImpl = class { + client; /** - * Initialize a new instance of the class Container class. + * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** - * creates a new container under the specified account. 
If the container with the same name already - * exists, the operation fails - * @param options The options parameters. - */ - create(options) { - return this.client.sendOperationRequest({ options }, createOperationSpec$2); - } - /** - * returns all user-defined metadata and system properties for the specified container. The data - * returned does not include the container's list of blobs - * @param options The options parameters. - */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); - } - /** - * operation marks the specified container for deletion. The container and any blobs contained within - * it are later deleted during garbage collection - * @param options The options parameters. - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); - } - /** - * operation sets one or more user-defined name-value pairs for the specified container. - * @param options The options parameters. - */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); - } - /** - * gets the permissions for the specified container. The permissions indicate whether container data - * may be accessed publicly. - * @param options The options parameters. - */ - getAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); - } - /** - * sets the permissions for the specified container. The permissions indicate whether blobs in a - * container may be accessed publicly. - * @param options The options parameters. - */ - setAccessPolicy(options) { - return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); - } - /** - * Restores a previously-deleted container. - * @param options The options parameters. - */ - restore(options) { - return this.client.sendOperationRequest({ options }, restoreOperationSpec); - } - /** - * Renames an existing container. - * @param sourceContainerName Required. Specifies the name of the container to rename. + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. * @param options The options parameters. */ - rename(sourceContainerName2, options) { - return this.client.sendOperationRequest({ sourceContainerName: sourceContainerName2, options }, renameOperationSpec); + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ * @param body Initial data * @param options The options parameters. 
*/ - submitBatch(contentLength2, multipartContentType2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, multipartContentType: multipartContentType2, body: body2, options }, submitBatchOperationSpec); + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } /** - * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given - * search expression. Filter blobs searches within the given container. + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. * @param options The options parameters. */ - filterBlobs(options) { - return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. * @param options The options parameters. */ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); } + }; + exports2.AppendBlobImpl = AppendBlobImpl; + var xmlSerializer = coreClient.createSerializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer + }; + var appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: 
[Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer + }; + var appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer + }; + var sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js +var require_blockBlob = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlockBlobImpl = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreClient = tslib_1.__importStar(require_commonjs8()); + var Mappers = tslib_1.__importStar(require_mappers()); + var Parameters = tslib_1.__importStar(require_parameters()); + var BlockBlobImpl = class { + client; /** - * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client */ - releaseLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, releaseLeaseOperationSpec$1); + constructor(client) { + this.client = client; } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data * @param options The options parameters. */ - renewLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, renewLeaseOperationSpec$1); + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. * @param options The options parameters. */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data * @param options The options parameters. */ - changeLease(leaseId2, proposedLeaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, proposedLeaseId: proposedLeaseId2, options }, changeLeaseOperationSpec$1); + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. * @param options The options parameters. */ - listBlobFlatSegment(options) { - return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix - * element in the response body that acts as a placeholder for all blobs whose names begin with the - * same substring up to the appearance of the delimiter character. The delimiter may be a single - * character or a string. + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. * @param options The options parameters. */ - listBlobHierarchySegment(delimiter2, options) { - return this.client.sendOperationRequest({ delimiter: delimiter2, options }, listBlobHierarchySegmentOperationSpec); + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } /** - * Returns the sku name and account kind + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. * @param options The options parameters. 
*/ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } }; - var xmlSerializer$4 = coreClient__namespace.createSerializer( + exports2.BlockBlobImpl = BlockBlobImpl; + var xmlSerializer = coreClient.createSerializer( Mappers, /* isXml */ true ); - var createOperationSpec$2 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - access, - defaultEncryptionScope, - preventEncryptionScopeOverride - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var getPropertiesOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var deleteOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "DELETE", + var uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 202: { - headersMapper: ContainerDeleteHeaders + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders } }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 ], isXML: true, - serializer: xmlSerializer$4 + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var setMetadataOperationSpec$1 = { - path: "/{containerName}", + var putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerSetMetadataHeaders + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders }, default: { - bodyMapper: StorageError, - 
headersMapper: ContainerSetMetadataExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp6 - ], - urlParameters: [url], + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.fileRequestIntent, + Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var getAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", + var stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "SignedIdentifier" } - } - }, - serializedName: "SignedIdentifiers", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" - }, - headersMapper: ContainerGetAccessPolicyHeaders + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders } }, + requestBody: Parameters.body1, queryParameters: [ - timeoutInSeconds, - restype2, - comp7 + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId ], - urlParameters: [url], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1, - leaseId + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 ], isXML: true, - serializer: xmlSerializer$4 + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer }; - var setAccessPolicyOperationSpec = { - path: "/{containerName}", + var stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerSetAccessPolicyHeaders + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders + bodyMapper: 
Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders } }, - requestBody: containerAcl, queryParameters: [ - timeoutInSeconds, - restype2, - comp7 + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId ], - urlParameters: [url], + urlParameters: [Parameters.url], headerParameters: [ - contentType, - accept, - version, - requestId, - access, - leaseId, - ifModifiedSince, - ifUnmodifiedSince + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.fileRequestIntent, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var restoreOperationSpec = { - path: "/{containerName}", + var commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerRestoreHeaders + headersMapper: Mappers.BlockBlobCommitBlockListHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp8 - ], - urlParameters: [url], + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1, - deletedContainerName, - deletedContainerVersion + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 ], isXML: true, - serializer: xmlSerializer$4 + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var renameOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", + var getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", responses: { 200: { - headersMapper: ContainerRenameHeaders + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders }, default: { - bodyMapper: StorageError, - headersMapper: ContainerRenameExceptionHeaders + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders } }, queryParameters: [ - timeoutInSeconds, - restype2, - comp9 + Parameters.timeoutInSeconds, + Parameters.snapshot, + 
Parameters.comp25, + Parameters.listType ], - urlParameters: [url], + urlParameters: [Parameters.url], headerParameters: [ - version, - requestId, - accept1, - sourceContainerName, - sourceLeaseId + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var submitBatchOperationSpec = { - path: "/{containerName}", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js +var require_operations = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operations/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service(), exports2); + tslib_1.__exportStar(require_container(), exports2); + tslib_1.__exportStar(require_blob(), exports2); + tslib_1.__exportStar(require_pageBlob(), exports2); + tslib_1.__exportStar(require_appendBlob(), exports2); + tslib_1.__exportStar(require_blockBlob(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js +var require_storageClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/storageClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var coreHttpCompat = tslib_1.__importStar(require_commonjs9()); + var index_js_1 = require_operations(); + var StorageClient = class extends coreHttpCompat.ExtendedServiceClient { + url; + version; + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + if (url === void 0) { + throw new Error("'url' cannot be null"); + } + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8" + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.28.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; + const optionsWithDefaults = { + ...defaults, + ...options, + userAgentOptions: { + userAgentPrefix }, - headersMapper: ContainerSubmitBatchHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders + endpoint: options.endpoint ?? options.baseUri ?? 
"{url}" + }; + super(optionsWithDefaults); + this.url = url; + this.version = options.version || "2025-07-05"; + this.service = new index_js_1.ServiceImpl(this); + this.container = new index_js_1.ContainerImpl(this); + this.blob = new index_js_1.BlobImpl(this); + this.pageBlob = new index_js_1.PageBlobImpl(this); + this.appendBlob = new index_js_1.AppendBlobImpl(this); + this.blockBlob = new index_js_1.BlockBlobImpl(this); + } + service; + container; + blob; + pageBlob; + appendBlob; + blockBlob; + }; + exports2.StorageClient = StorageClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js +var require_service2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/service.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js +var require_container2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/container.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js +var require_blob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js +var require_pageBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/pageBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js +var require_appendBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/appendBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js +var require_blockBlob2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/blockBlob.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js +var require_operationsInterfaces = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/operationsInterfaces/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_service2(), exports2); + tslib_1.__exportStar(require_container2(), exports2); + tslib_1.__exportStar(require_blob2(), exports2); + tslib_1.__exportStar(require_pageBlob2(), exports2); + tslib_1.__exportStar(require_appendBlob2(), exports2); + tslib_1.__exportStar(require_blockBlob2(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js +var require_src2 = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/generated/src/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + tslib_1.__exportStar(require_models(), exports2); + var storageClient_js_1 = require_storageClient(); + Object.defineProperty(exports2, "StorageClient", { enumerable: true, get: function() { + return storageClient_js_1.StorageClient; + } }); + tslib_1.__exportStar(require_operationsInterfaces(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js +var require_StorageContextClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageContextClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageContextClient = void 0; + var index_js_1 = require_src2(); + var StorageContextClient = class extends index_js_1.StorageClient { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = { ...operationSpec }; + if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); + } + }; + exports2.StorageContextClient = StorageContextClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js +var require_StorageClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/StorageClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageClient = void 0; + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var StorageClient = class { + /** + * Encoded URL string value. + */ + url; + accountName; + /** + * Request policy pipeline. + * + * @internal + */ + pipeline; + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + storageClientContext; + /** + */ + isHttps; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url, pipeline) { + this.url = (0, utils_common_js_1.escapeURLPath)(url); + this.accountName = (0, utils_common_js_1.getAccountNameFromUrl)(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient_js_1.StorageContextClient(this.url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + this.isHttps = (0, utils_common_js_1.iEqual)((0, utils_common_js_1.getURLScheme)(this.url) || "", "https"); + this.credential = (0, Pipeline_js_1.getCredentialFromPipeline)(pipeline); + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = void 0; + } + }; + exports2.StorageClient = StorageClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js +var require_tracing = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/tracing.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.tracingClient = void 0; + var core_tracing_1 = require_commonjs5(); + var constants_js_1 = require_constants9(); + exports2.tracingClient = (0, core_tracing_1.createTracingClient)({ + packageName: "@azure/storage-blob", + packageVersion: constants_js_1.SDK_VERSION, + namespace: "Microsoft.Storage" + }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js +var require_BlobSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobSASPermissions = void 0; + var BlobSASPermissions = class _BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new _BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new _BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Specifies Read access granted. + */ + read = false; + /** + * Specifies Add access granted. + */ + add = false; + /** + * Specifies Create access granted. + */ + create = false; + /** + * Specifies Write access granted. + */ + write = false; + /** + * Specifies Delete access granted. + */ + delete = false; + /** + * Specifies Delete version access granted. + */ + deleteVersion = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Specifies Move access granted. + */ + move = false; + /** + * Specifies Execute access granted. + */ + execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } + }; + exports2.BlobSASPermissions = BlobSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js +var require_ContainerSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/ContainerSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerSASPermissions = void 0; + var ContainerSASPermissions = class _ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new _ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new _ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Specifies Read access granted. + */ + read = false; + /** + * Specifies Add access granted. + */ + add = false; + /** + * Specifies Create access granted. + */ + create = false; + /** + * Specifies Write access granted. + */ + write = false; + /** + * Specifies Delete access granted. + */ + delete = false; + /** + * Specifies Delete version access granted. + */ + deleteVersion = false; + /** + * Specifies List access granted. + */ + list = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Specifies Move access granted. + */ + move = false; + /** + * Specifies Execute access granted. + */ + execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags = false; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. 
+ * + * The order of the characters should be as specified here to ensure correctness. + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - }, - requestBody: body, - queryParameters: [ - timeoutInSeconds, - comp4, - restype2 - ], - urlParameters: [url], - headerParameters: [ - accept, - version, - requestId, - contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 - }; - var filterBlobsOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders + if (this.add) { + permissions.push("a"); } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where, - restype2 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var acquireLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerAcquireLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders + if (this.create) { + permissions.push("c"); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var releaseLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerReleaseLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders + if (this.write) { + permissions.push("w"); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var renewLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerRenewLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders + if (this.delete) { + permissions.push("d"); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var breakLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: ContainerBreakLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders + if (this.deleteVersion) { + permissions.push("x"); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var changeLeaseOperationSpec$1 = { - path: 
"/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerChangeLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders + if (this.list) { + permissions.push("l"); } - }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var listBlobFlatSegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + if (this.tag) { + permissions.push("t"); } - }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var listBlobHierarchySegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + if (this.move) { + permissions.push("m"); } - }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1, - delimiter - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var getAccountInfoOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders + if (this.execute) { + permissions.push("e"); } - }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } }; - var BlobImpl = class { + exports2.ContainerSASPermissions = ContainerSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js +var require_UserDelegationKeyCredential = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/credentials/UserDelegationKeyCredential.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UserDelegationKeyCredential = void 0; + var node_crypto_1 = require("node:crypto"); + var UserDelegationKeyCredential = class { /** - * Initialize a new instance of the class Blob class. - * @param client Reference to the service client + * Azure Storage account name; readonly. */ - constructor(client) { - this.client = client; - } + accountName; /** - * The Download operation reads or downloads a blob from the system, including its metadata and - * properties. You can also call Download to read a snapshot. 
- * @param options The options parameters. + * Azure Storage user delegation key; readonly. */ - download(options) { - return this.client.sendOperationRequest({ options }, downloadOperationSpec); - } + userDelegationKey; /** - * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system - * properties for the blob. It does not return the content of the blob. - * @param options The options parameters. + * Key value in Buffer type. */ - getProperties(options) { - return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); - } + key; /** - * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - * permanently removed from the storage account. If the storage account's soft delete feature is - * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - * immediately. However, the blob service retains the blob or snapshot for the number of days specified - * by the DeleteRetentionPolicy section of [Storage service properties] - * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is - * permanently removed from the storage account. Note that you continue to be charged for the - * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the - * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You - * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a - * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 - * (ResourceNotFound). - * @param options The options parameters. + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - */ - delete(options) { - return this.client.sendOperationRequest({ options }, deleteOperationSpec); + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } /** - * Undelete a blob that was previously soft deleted - * @param options The options parameters. + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - */ - undelete(options) { - return this.client.sendOperationRequest({ options }, undeleteOperationSpec); + computeHMACSHA256(stringToSign) { + return (0, node_crypto_1.createHmac)("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } + }; + exports2.UserDelegationKeyCredential = UserDelegationKeyCredential; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js +var require_SasIPRange = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/SasIPRange.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ipRangeToString = ipRangeToString; + function ipRangeToString(ipRange) { + return ipRange.end ? 
`${ipRange.start}-${ipRange.end}` : ipRange.start; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js +var require_SASQueryParameters = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/SASQueryParameters.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.SASQueryParameters = exports2.SASProtocol = void 0; + var SasIPRange_js_1 = require_SasIPRange(); + var utils_common_js_1 = require_utils_common(); + var SASProtocol; + (function(SASProtocol2) { + SASProtocol2["Https"] = "https"; + SASProtocol2["HttpsAndHttp"] = "https,http"; + })(SASProtocol || (exports2.SASProtocol = SASProtocol = {})); + var SASQueryParameters = class { /** - * Sets the time a blob will expire and be deleted. - * @param expiryOptions Required. Indicates mode of the expiry time - * @param options The options parameters. + * The storage API version. */ - setExpiry(expiryOptions2, options) { - return this.client.sendOperationRequest({ expiryOptions: expiryOptions2, options }, setExpiryOperationSpec); - } + version; /** - * The Set HTTP Headers operation sets system properties on the blob - * @param options The options parameters. + * Optional. The allowed HTTP protocol(s). */ - setHttpHeaders(options) { - return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); - } + protocol; /** - * The Set Immutability Policy operation sets the immutability policy on the blob - * @param options The options parameters. + * Optional. The start time for this SAS token. */ - setImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); - } + startsOn; /** - * The Delete Immutability Policy operation deletes the immutability policy on the blob - * @param options The options parameters. + * Optional only when identifier is provided. The expiry time for this SAS token. */ - deleteImmutabilityPolicy(options) { - return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); - } + expiresOn; /** - * The Set Legal Hold operation sets a legal hold on the blob. - * @param legalHold Specified if a legal hold should be set on the blob. - * @param options The options parameters. + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. */ - setLegalHold(legalHold2, options) { - return this.client.sendOperationRequest({ legalHold: legalHold2, options }, setLegalHoldOperationSpec); - } + permissions; /** - * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more - * name-value pairs - * @param options The options parameters. + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. */ - setMetadata(options) { - return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); - } + services; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. 
*/ - acquireLease(options) { - return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); - } + resourceTypes; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://learn.microsoft.com/rest/api/storageservices/establishing-a-stored-access-policy */ - releaseLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, releaseLeaseOperationSpec); - } + identifier; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. */ - renewLease(leaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, options }, renewLeaseOperationSpec); - } + encryptionScope; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://learn.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only */ - changeLease(leaseId2, proposedLeaseId2, options) { - return this.client.sendOperationRequest({ leaseId: leaseId2, proposedLeaseId: proposedLeaseId2, options }, changeLeaseOperationSpec); - } + resource; /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. + * The signature for the SAS token. */ - breakLease(options) { - return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); - } + signature; /** - * The Create Snapshot operation creates a read-only snapshot of a blob - * @param options The options parameters. + * Value for cache-control header in Blob/File Service SAS. */ - createSnapshot(options) { - return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); - } + cacheControl; /** - * The Start Copy From URL operation copies a blob or an internet resource to a new blob. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Value for content-disposition header in Blob/File Service SAS. 
*/ - startCopyFromURL(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, startCopyFromURLOperationSpec); - } + contentDisposition; /** - * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return - * a response until the copy is complete. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Value for content-encoding header in Blob/File Service SAS. */ - copyFromURL(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, copyFromURLOperationSpec); - } + contentEncoding; /** - * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination - * blob with zero length and full metadata. - * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob - * operation. - * @param options The options parameters. + * Value for content-length header in Blob/File Service SAS. */ - abortCopyFromURL(copyId2, options) { - return this.client.sendOperationRequest({ copyId: copyId2, options }, abortCopyFromURLOperationSpec); - } + contentLanguage; /** - * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant storage only). A - * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block - * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's - * ETag. - * @param tier Indicates the tier to be set on the blob. - * @param options The options parameters. + * Value for content-type header in Blob/File Service SAS. */ - setTier(tier2, options) { - return this.client.sendOperationRequest({ tier: tier2, options }, setTierOperationSpec); - } + contentType; /** - * Returns the sku name and account kind - * @param options The options parameters. + * Inner value of getter ipRange. */ - getAccountInfo(options) { - return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); - } + ipRangeInner; /** - * The Query operation enables users to select/project on blob data by providing simple query - * expressions. - * @param options The options parameters. + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. */ - query(options) { - return this.client.sendOperationRequest({ options }, queryOperationSpec); + signedOid; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + signedTenantId; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + signedStartsOn; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + signedExpiresOn; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + signedService; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + signedVersion; + /** + * Authorized AAD Object ID in GUID format. 
The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId; + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; + } + return void 0; + } + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = 
userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } } /** - * The Get Tags operation enables users to get the tags associated with a blob. - * @param options The options parameters. + * Encodes all SAS query parameters into a string that can be appended to a URL. + * */ - getTags(options) { - return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + // Signed object ID + "sktid", + // Signed tenant ID + "skt", + // Signed key start time + "ske", + // Signed key expiry time + "sks", + // Signed key service + "skv", + // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid" + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.startsOn, false) : void 0); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.expiresOn, false) : void 0); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(this.ipRange) : void 0); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(this.signedStartsOn, false) : void 0); + break; + case "ske": + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
(0, utils_common_js_1.truncatedISO8061Date)(this.signedExpiresOn, false) : void 0); + break; + case "sks": + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); } /** - * The Set Tags operation enables users to set tags on a blob. - * @param options The options parameters. + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - */ - setTags(options) { - return this.client.sendOperationRequest({ options }, setTagsOperationSpec); - } - }; - var xmlSerializer$3 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var downloadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - rangeGetContentMD5, - rangeGetContentCRC64, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var getPropertiesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "HEAD", - responses: { - 200: { - headersMapper: BlobGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var deleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: BlobDeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - blobDeleteType - ], - 
urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - deleteSnapshots - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var undeleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobUndeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp8], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setExpiryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetExpiryHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp11], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - expiryOptions, - expiresOn - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setHttpHeadersOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetHttpHeadersHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp12 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifUnmodifiedSince, - immutabilityPolicyExpiry, - immutabilityPolicyMode - ], - isXML: true, - serializer: xmlSerializer$3 + } }; - var deleteImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + exports2.SASQueryParameters = SASQueryParameters; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js +var require_BlobSASSignatureValues = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/BlobSASSignatureValues.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; + exports2.generateBlobSASQueryParametersInternal = generateBlobSASQueryParametersInternal; + var BlobSASPermissions_js_1 = require_BlobSASPermissions(); + var ContainerSASPermissions_js_1 = 
require_ContainerSASPermissions(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var UserDelegationKeyCredential_js_1 = require_UserDelegationKeyCredential(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); + function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; + } + function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; + let userDelegationKeyCredential; + if (sharedKeyCredential === void 0 && accountName !== void 0) { + userDelegationKeyCredential = new UserDelegationKeyCredential_js_1.UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + if (version >= "2020-12-06") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2025-07-05") { + return generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp12 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setLegalHoldOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetLegalHoldHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders + } + if (version >= "2018-11-09") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp13 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - legalHold - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setMetadataOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetMetadataHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } else { + throw 
new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); } - }, - queryParameters: [timeoutInSeconds, comp6], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var acquireLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobAcquireLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders + } + throw new RangeError("'version' must be >= '2015-04-05'."); + } + function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var releaseLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobReleaseLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign + }; + } + function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var renewLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobRenewLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var changeLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobChangeLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? 
(0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), + stringToSign + }; + } + function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var breakLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobBreakLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var createSnapshotOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobCreateSnapshotHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders + 
} + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [timeoutInSeconds, comp14], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var startCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobStartCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + 
verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - tier, - rehydratePriority, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sealBlob, - legalHold1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var copyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - copySource, - blobTagsString, - legalHold1, - xMsRequiresSync, - sourceContentMD5, - copySourceAuthorization, - copySourceTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var abortCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobAbortCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - comp15, - copyId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - copyActionAbortConstant - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setTierOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetTierHeaders - }, - 202: { - headersMapper: BlobSetTierHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders + } + const stringToSign = [ + 
verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp16 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags, - rehydratePriority, - tier1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var getAccountInfoOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - 
headersMapper: BlobGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - comp, - timeoutInSeconds, - restype1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var queryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function generateBlobSASQueryParametersUDK20250705(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - requestBody: queryRequest, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp17 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 - }; - var getTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions_js_1.BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions_js_1.ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobSetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTagsExceptionHeaders + } + const stringToSign = [ + 
verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + // agentObjectId + blobSASSignatureValues.correlationId, + void 0, + // SignedKeyDelegatedUserTenantId, will be added in a future release. + void 0, + // SignedDelegatedUserObjectId, will be added in future release. + blobSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), + stringToSign + }; + } + function getCanonicalName(accountName, containerName, blobName) { + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); + } + function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js +var require_BlobLeaseClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobLeaseClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobLeaseClient = void 0; + var core_util_1 = require_commonjs4(); + var constants_js_1 = require_constants9(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobLeaseClient = class { + _leaseId; + _url; + _containerOrBlobOperation; + _isContainer; + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. 
+ */ + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === void 0) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; + } else { + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; } - }, - requestBody: tags, - queryParameters: [ - timeoutInSeconds, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - leaseId, - ifTags, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 - }; - var PageBlobImpl = class { + if (!leaseId) { + leaseId = (0, core_util_1.randomUUID)(); + } + this._leaseId = leaseId; + } /** - * Initialize a new instance of the class PageBlob class. - * @param client Reference to the service client + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. */ - constructor(client) { - this.client = client; + async acquireLease(duration, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * The Create operation creates a new page blob. - * @param contentLength The length of the request. - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. + * To change the ID of the lease. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. 
*/ - create(contentLength2, blobContentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, blobContentLength: blobContentLength2, options }, createOperationSpec$1); + async changeLease(proposedLeaseId, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + this._leaseId = proposedLeaseId; + return response; + }); } /** - * The Upload Pages operation writes a range of pages to a page blob - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. */ - uploadPages(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, uploadPagesOperationSpec); + async releaseLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * The Clear Pages operation clears a set of pages from a page blob - * @param contentLength The length of the request. - * @param options The options parameters. + * To renew the lease. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. 
*/ - clearPages(contentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, options }, clearPagesOperationSpec); + async renewLease(options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + }); + }); } /** - * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a - * URL - * @param sourceUrl Specify a URL to the copy source. - * @param sourceRange Bytes of source data in the specified range. The length of this range should - * match the ContentLength header and x-ms-range/Range destination range header. - * @param contentLength The length of the request. - * @param range The range of bytes to which the source range would be written. The range should be 512 - * aligned and range-end is required. - * @param options The options parameters. + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://learn.microsoft.com/rest/api/storageservices/lease-container + * and + * @see https://learn.microsoft.com/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. */ - uploadPagesFromURL(sourceUrl2, sourceRange2, contentLength2, range2, options) { - return this.client.sendOperationRequest({ sourceUrl: sourceUrl2, sourceRange: sourceRange2, contentLength: contentLength2, range: range2, options }, uploadPagesFromURLOperationSpec); + async breakLease(breakPeriod, options = {}) { + if (this._isContainer && (options.conditions?.ifMatch && options.conditions?.ifMatch !== constants_js_1.ETagNone || options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== constants_js_1.ETagNone || options.conditions?.tagConditions)) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracing_js_1.tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + }; + return (0, utils_common_js_1.assertResponse)(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } + }; + exports2.BlobLeaseClient = BlobLeaseClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js +var require_RetriableReadableStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/RetriableReadableStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RetriableReadableStream = void 0; + var abort_controller_1 = require_commonjs3(); + var node_stream_1 = require("node:stream"); + var RetriableReadableStream = class extends node_stream_1.Readable { + start; + offset; + end; + getter; + source; + retries = 0; + maxRetryRequests; + onProgress; + options; /** - * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a - * page blob - * @param options The options parameters. + * Creates an instance of RetriableReadableStream. + * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - */ - getPageRanges(options) { - return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = void 0; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; + } + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + sourceAbortedHandler = () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } else if (this.offset <= this.end) { + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset).then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }).catch((error2) => { + this.destroy(error2); + }); + } else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + _destroy(error2, callback) { + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error2 === null ? void 0 : error2); + } + }; + exports2.RetriableReadableStream = RetriableReadableStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js +var require_BlobDownloadResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobDownloadResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobDownloadResponse = void 0; + var core_util_1 = require_commonjs4(); + var RetriableReadableStream_js_1 = require_RetriableReadableStream(); + var BlobDownloadResponse = class { /** - * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were - * changed between target blob and previous snapshot. - * @param options The options parameters. + * Indicates that the service supports + * requests for partial file content. 
+ * + * @readonly */ - getPageRangesDiff(options) { - return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Resize the Blob - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. + * Returns if it was previously specified + * for the file. + * + * @readonly */ - resize(blobContentLength2, options) { - return this.client.sendOperationRequest({ blobContentLength: blobContentLength2, options }, resizeOperationSpec); + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * Update the sequence number of the blob - * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. - * This property applies to page blobs only. This property indicates how the service should modify the - * blob's sequence number - * @param options The options parameters. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly */ - updateSequenceNumber(sequenceNumberAction2, options) { - return this.client.sendOperationRequest({ sequenceNumberAction: sequenceNumberAction2, options }, updateSequenceNumberOperationSpec); + get contentDisposition() { + return this.originalResponse.contentDisposition; } /** - * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. - * The snapshot is copied such that only the differential changes between the previously copied - * snapshot are transferred to the destination. The copied snapshots are complete copies of the - * original snapshot and can be read or copied from as usual. This API is supported since REST version - * 2016-05-31. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly */ - copyIncremental(copySource2, options) { - return this.client.sendOperationRequest({ copySource: copySource2, options }, copyIncrementalOperationSpec); + get contentEncoding() { + return this.originalResponse.contentEncoding; } - }; - var xmlSerializer$2 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec$1 = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - blobType, - blobContentLength, - blobSequenceNumber - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer$2 - }; - var clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobClearPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - pageWrite1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - 
sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - sourceUrl, - sourceRange, - sourceContentCrc64, - range1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20, - prevsnapshot - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags, - prevSnapshotUrl - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobResizeHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - blobContentLength - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobSequenceNumber, - sequenceNumberAction - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: PageBlobCopyIncrementalHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp21], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - copySource - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var AppendBlobImpl = class { /** - * Initialize a new instance of the class AppendBlob class. 
- * @param client Reference to the service client + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly */ - constructor(client) { - this.client = client; + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly */ - create(contentLength2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, options }, createOperationSpec); + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob. The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly */ - appendBlock(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, appendBlockOperationSpec); + get blobType() { + return this.originalResponse.blobType; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. + * The number of bytes present in the + * response body. + * + * @readonly */ - appendBlockFromUrl(sourceUrl2, contentLength2, options) { - return this.client.sendOperationRequest({ sourceUrl: sourceUrl2, contentLength: contentLength2, options }, appendBlockFromUrlOperationSpec); + get contentLength() { + return this.originalResponse.contentLength; } /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. 
+ * + * @readonly */ - seal(options) { - return this.client.sendOperationRequest({ options }, sealOperationSpec); + get contentMD5() { + return this.originalResponse.contentMD5; } - }; - var xmlSerializer$1 = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - blobTagsString, - legalHold1, - blobType1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var appendBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - maxSize, - appendPosition - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer$1 - }; - var appendBlockFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - transactionalContentMD5, - sourceUrl, - sourceContentCrc64, - maxSize, - appendPosition, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var sealOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: AppendBlobSealHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp23], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - appendPosition - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var BlockBlobImpl = class { /** - * Initialize a new instance of the class BlockBlob class. 
- * @param client Reference to the service client + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly */ - constructor(client) { - this.client = client; + get contentRange() { + return this.originalResponse.contentRange; } /** - * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing - * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put - * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a - * partial update of the content of a block blob, use the Put Block List operation. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly */ - upload(contentLength2, body2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, body: body2, options }, uploadOperationSpec); + get contentType() { + return this.originalResponse.contentType; } /** - * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read - * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are - * not supported with Put Blob from URL; the content of an existing blob is overwritten with the - * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, - * use the Put Block from URL API in conjunction with Put Block List. - * @param contentLength The length of the request. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly */ - putBlobFromUrl(contentLength2, copySource2, options) { - return this.client.sendOperationRequest({ contentLength: contentLength2, copySource: copySource2, options }, putBlobFromUrlOperationSpec); + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; } /** - * The Stage Block operation creates a new block to be committed as part of a blob - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * String identifier for the last attempted Copy + * File operation where this file was the destination file. 
+ * + * @readonly */ - stageBlock(blockId2, contentLength2, body2, options) { - return this.client.sendOperationRequest({ blockId: blockId2, contentLength: contentLength2, body: body2, options }, stageBlockOperationSpec); + get copyId() { + return this.originalResponse.copyId; } /** - * The Stage Block operation creates a new block to be committed as part of a blob where the contents - * are read from a URL. - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param sourceUrl Specify a URL to the copy source. - * @param options The options parameters. + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly */ - stageBlockFromURL(blockId2, contentLength2, sourceUrl2, options) { - return this.client.sendOperationRequest({ blockId: blockId2, contentLength: contentLength2, sourceUrl: sourceUrl2, options }, stageBlockFromURLOperationSpec); + get copyProgress() { + return this.originalResponse.copyProgress; } /** - * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the - * blob. In order to be written as part of a blob, a block must have been successfully written to the - * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading - * only those blocks that have changed, then committing the new and existing blocks together. You can - * do this by specifying whether to commit a block from the committed block list or from the - * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list - * it may belong to. - * @param blocks Blob Blocks. - * @param options The options parameters. + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly */ - commitBlockList(blocks2, options) { - return this.client.sendOperationRequest({ blocks: blocks2, options }, commitBlockListOperationSpec); + get copySource() { + return this.originalResponse.copySource; } /** - * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block - * blob - * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted - * blocks, or both lists together. - * @param options The options parameters. + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly */ - getBlockList(listType2, options) { - return this.client.sendOperationRequest({ listType: listType2, options }, getBlockListOperationSpec); + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; } - }; - var xmlSerializer = coreClient__namespace.createSerializer( - Mappers, - /* isXml */ - true - ); - var uploadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobUploadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - blobType2 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer - }; - var putBlobFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sourceContentMD5, - copySourceAuthorization, - copySourceTags, - transactionalContentMD5, - blobType2, - copySourceBlobProperties - ], - isXML: true, - serializer: xmlSerializer - }; - var stageBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "binary", - serializer: xmlSerializer - }; - var stageBlockFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - 
encryptionAlgorithm, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - sourceUrl, - sourceContentCrc64, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer - }; - var commitBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobCommitBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders - } - }, - requestBody: blocks, - queryParameters: [timeoutInSeconds, comp25], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer - }; - var getBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlockList, - headersMapper: BlockBlobGetBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp25, - listType - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer - }; - var StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { /** - * Initializes a new instance of the StorageClient class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly */ - constructor(url2, options) { - var _a, _b; - if (url2 === void 0) { - throw new Error("'url' cannot be null"); - } - if (!options) { - options = {}; - } - const defaults = { - requestContentType: "application/json; charset=utf-8" - }; - const packageDetails = `azsdk-js-azure-storage-blob/12.27.0`; - const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; - const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { - userAgentPrefix - }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? 
_b : "{url}" }); - super(optionsWithDefaults); - this.url = url2; - this.version = options.version || "2025-05-05"; - this.service = new ServiceImpl(this); - this.container = new ContainerImpl(this); - this.blob = new BlobImpl(this); - this.pageBlob = new PageBlobImpl(this); - this.appendBlob = new AppendBlobImpl(this); - this.blockBlob = new BlockBlobImpl(this); + get leaseDuration() { + return this.originalResponse.leaseDuration; } - }; - var StorageContextClient = class extends StorageClient$1 { - async sendOperationRequest(operationArguments, operationSpec) { - const operationSpecToSend = Object.assign({}, operationSpec); - if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { - operationSpecToSend.path = ""; - } - return super.sendOperationRequest(operationArguments, operationSpecToSend); + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; } - }; - var StorageClient = class { /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly */ - constructor(url2, pipeline) { - this.url = escapeURLPath(url2); - this.accountName = getAccountNameFromUrl(url2); - this.pipeline = pipeline; - this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); - this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = getCredentialFromPipeline(pipeline); - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = void 0; + get leaseStatus() { + return this.originalResponse.leaseStatus; } - }; - var tracingClient = coreTracing.createTracingClient({ - packageName: "@azure/storage-blob", - packageVersion: SDK_VERSION, - namespace: "Microsoft.Storage" - }); - var BlobSASPermissions = class _BlobSASPermissions { - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; } /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. 
* - * @param permissions - + * @readonly */ - static parse(permissions) { - const blobSASPermissions = new _BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } - } - return blobSASPermissions; + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; } /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const blobSASPermissions = new _BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; - } - if (permissionLike.add) { - blobSASPermissions.add = true; - } - if (permissionLike.create) { - blobSASPermissions.create = true; - } - if (permissionLike.write) { - blobSASPermissions.write = true; - } - if (permissionLike.delete) { - blobSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - blobSASPermissions.tag = true; - } - if (permissionLike.move) { - blobSASPermissions.move = true; - } - if (permissionLike.execute) { - blobSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; - } - return blobSASPermissions; + get etag() { + return this.originalResponse.etag; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. 
+ * The number of tags associated with the blob * - * @returns A string which represents the BlobSASPermissions + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); + get tagCount() { + return this.originalResponse.tagCount; } - }; - var ContainerSASPermissions = class _ContainerSASPermissions { - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; - this.filterByTags = false; + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; } /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). * - * @param permissions - + * @readonly */ - static parse(permissions) { - const containerSASPermissions = new _ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. 
* - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const containerSASPermissions = new _ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + get blobContentMD5() { + return this.originalResponse.blobContentMD5; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. * - * The order of the characters should be as specified here to ensure correctness. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. * + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.list) { - permissions.push("l"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - if (this.filterByTags) { - permissions.push("f"); - } - return permissions.join(""); + get lastAccessed() { + return this.originalResponse.lastAccessed; } - }; - var UserDelegationKeyCredential = class { /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - + * Returns the date and time the blob was created. + * + * @readonly */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); + get createdOn() { + return this.originalResponse.createdOn; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * A name-value pair + * to associate with a file storage object. 
* - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto2.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + get metadata() { + return this.originalResponse.metadata; } - }; - function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; - } - exports2.SASProtocol = void 0; - (function(SASProtocol) { - SASProtocol["Https"] = "https"; - SASProtocol["HttpsAndHttp"] = "https,http"; - })(exports2.SASProtocol || (exports2.SASProtocol = {})); - var SASQueryParameters = class { /** - * Optional. IP range allowed for this SAS. + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. * * @readonly */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; - } - return void 0; + get requestId() { + return this.originalResponse.requestId; } - constructor(version2, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { - this.version = version2; - this.signature = signature; - if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn2; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = encryptionScope2; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType2; - if (userDelegationKey) { - this.signedOid = 
userDelegationKey.signedObjectId; - this.signedTenantId = userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } - } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } /** - * Encodes all SAS query parameters into a string that can be appended to a URL. + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. * + * @readonly */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - // Signed object ID - "sktid", - // Signed tenant ID - "skt", - // Signed key start time - "ske", - // Signed key expiry time - "sks", - // Signed key service - "skv", - // Signed key version - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid" - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : void 0); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : void 0); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? 
ipRangeToString(this.ipRange) : void 0); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : void 0); - break; - case "ske": - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : void 0); - break; - case "sks": - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } - } - return queries.join("&"); + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; } /** - * A private helper method used to filter and append query key/value pairs into an array. + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. * - * @param queries - - * @param key - - * @param value - + * @readonly */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; - } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); - } + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; } - }; - function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; - } - function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version2 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; - let userDelegationKeyCredential; - if (sharedKeyCredential === void 0 && accountName !== void 0) { - userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + /** + * If this blob has been sealed. 
+ * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; } - if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; } - if (version2 >= "2020-12-06") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); - } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; } - if (version2 >= "2018-11-09") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); - } else { - if (version2 >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); - } - } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; } - if (version2 >= "2015-04-05") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); - } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); - } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; } - throw new RangeError("'version' must be >= '2015-04-05'."); - } - function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + originalResponse; + blobDownloadStream; + /** + * Creates an instance of BlobDownloadResponse. 
+ * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream_js_1.RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + }; + exports2.BlobDownloadResponse = BlobDownloadResponse; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js +var require_AvroConstants = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroConstants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AVRO_SCHEMA_KEY = exports2.AVRO_CODEC_KEY = exports2.AVRO_INIT_BYTES = exports2.AVRO_SYNC_MARKER_SIZE = void 0; + exports2.AVRO_SYNC_MARKER_SIZE = 16; + exports2.AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); + exports2.AVRO_CODEC_KEY = "avro.codec"; + exports2.AVRO_SCHEMA_KEY = "avro.schema"; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js +var require_AvroParser = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroType = exports2.AvroParser = void 0; + var AvroParser = class _AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); } + return bytes; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await _AvroParser.readByte(stream, options); + haveMoreByte = byte & 128; + zigZagEncoded |= (byte & 127) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); + if (haveMoreByte) { + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; + do { + byte = await _AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 127) * significanceInFloat; + significanceInFloat *= 128; + } while (byte & 128); + const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; } + return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + static async readLong(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), - stringToSign - }; - } - function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + static async readInt(stream, options = {}) { + return _AvroParser.readZigZagLong(stream, options); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + static async readNull() { + return null; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readBoolean(stream, options = {}) { + const b = await _AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } else if (b === 0) { + return false; } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + throw new Error("Byte was not a boolean."); } } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + static async readFloat(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + static async readDouble(stream, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readBytes(stream, options = {}) { + const size = await _AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); } + return stream.read(size, { abortSignal: options.abortSignal }); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + static async readString(stream, options = {}) { + const u8arr = await _AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await _AvroParser.readString(stream, options); + const value = await readItemMethod(stream, options); + return { key, value }; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return _AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = 
await _AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; } - } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), - stringToSign - }; - } - function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await _AvroParser.readLong(stream, options); count !== 0; count = await _AvroParser.readLong(stream, options)) { + if (count < 0) { + await _AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = 
await readItemMethod(stream, options); + items.push(item); + } } + return items; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + }; + exports2.AvroParser = AvroParser; + var AvroComplex; + (function(AvroComplex2) { + AvroComplex2["RECORD"] = "record"; + AvroComplex2["ENUM"] = "enum"; + AvroComplex2["ARRAY"] = "array"; + AvroComplex2["MAP"] = "map"; + AvroComplex2["UNION"] = "union"; + AvroComplex2["FIXED"] = "fixed"; + })(AvroComplex || (AvroComplex = {})); + var AvroPrimitive; + (function(AvroPrimitive2) { + AvroPrimitive2["NULL"] = "null"; + AvroPrimitive2["BOOLEAN"] = "boolean"; + AvroPrimitive2["INT"] = "int"; + AvroPrimitive2["LONG"] = "long"; + AvroPrimitive2["FLOAT"] = "float"; + AvroPrimitive2["DOUBLE"] = "double"; + AvroPrimitive2["BYTES"] = "bytes"; + AvroPrimitive2["STRING"] = "string"; + })(AvroPrimitive || (AvroPrimitive = {})); + var AvroType = class _AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + static fromSchema(schema) { + if (typeof schema === "string") { + return _AvroType.fromStringSchema(schema); + } else if (Array.isArray(schema)) { + return _AvroType.fromArraySchema(schema); } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + return _AvroType.fromObjectSchema(schema); } } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - // agentObjectId - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), - stringToSign - }; - } - function getCanonicalName(accountName, containerName, blobName) { - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } } - return elements.join(""); - } - function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version2 = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version2 < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(_AvroType.fromSchema)); } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + static fromObjectSchema(schema) { + const type = schema.type; + try { + return _AvroType.fromStringSchema(type); + } catch { + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = _AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(_AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: + // Unused today + case AvroComplex.FIXED: + // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } } - if (blobSASSignatureValues.versionId && version2 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + }; + exports2.AvroType = AvroType; + var AvroPrimitiveType = class extends AvroType { + _primitive; + constructor(primitive) { + super(); + this._primitive = primitive; } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version2 < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + }; + var AvroEnumType = class extends AvroType { + _symbols; + constructor(symbols) { + super(); + this._symbols = symbols; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version2 < "2019-10-10") { 
- throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version2 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + }; + var AvroUnionType = class extends AvroType { + _types; + constructor(types) { + super(); + this._types = types; } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version2 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); } - if (version2 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + }; + var AvroMapType = class extends AvroType { + _itemType; + constructor(itemType) { + super(); + this._itemType = itemType; } - if (version2 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { - throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); } - if (version2 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + }; + var AvroRecordType = class extends AvroType { + _name; + _fields; + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; } - if (blobSASSignatureValues.encryptionScope && version2 < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; } - blobSASSignatureValues.version = version2; - return blobSASSignatureValues; + }; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js +var require_utils_common3 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/utils/utils.common.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.arraysEqual = arraysEqual; + function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; } - var BlobLeaseClient = class { - /** - * Gets the lease Id. 
- * - * @readonly - */ - get leaseId() { - return this._leaseId; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js +var require_AvroReader = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReader.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReader = void 0; + var AvroConstants_js_1 = require_AvroConstants(); + var AvroParser_js_1 = require_AvroParser(); + var utils_common_js_1 = require_utils_common3(); + var AvroReader = class { + _dataStream; + _headerStream; + _syncMarker; + _metadata; + _itemType; + _itemsRemainingInBlock; + // Remembers where we started if partial data stream was provided. + _initialBlockOffset; + /// The byte offset within the Avro file (both header and data) + /// of the start of the current block. + _blockOffset; + get blockOffset() { + return this._blockOffset; } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; + _objectIndex; + get objectIndex() { + return this._objectIndex; } - /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. - */ - constructor(client, leaseId2) { - const clientContext = client.storageClientContext; - this._url = client.url; - if (client.name === void 0) { - this._isContainer = true; - this._containerOrBlobOperation = clientContext.container; - } else { - this._isContainer = false; - this._containerOrBlobOperation = clientContext.blob; - } - if (!leaseId2) { - leaseId2 = coreUtil.randomUUID(); - } - this._leaseId = leaseId2; + _initialized; + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; } - /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. - */ - async acquireLease(duration2, options = {}) { - var _a, _b, _c, _d, _e; - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + async initialize(options = {}) { + const header = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + }); + if (!(0, utils_common_js_1.arraysEqual)(header, AvroConstants_js_1.AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); } - return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { - var _a2; - return assertResponse(await this._containerOrBlobOperation.acquireLease({ - abortSignal: options.abortSignal, - duration: duration2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions }), - proposedLeaseId: this._leaseId, - tracingOptions: updatedOptions.tracingOptions - })); + this._metadata = await AvroParser_js_1.AvroParser.readMap(this._headerStream, AvroParser_js_1.AvroParser.readString, { + abortSignal: options.abortSignal }); - } - /** - * To change the ID of the lease. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param proposedLeaseId - the proposed new lease Id. - * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. - */ - async changeLease(proposedLeaseId2, options = {}) { - var _a, _b, _c, _d, _e; - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + const codec = this._metadata[AvroConstants_js_1.AVRO_CODEC_KEY]; + if (!(codec === void 0 || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); } - return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { - var _a2; - const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId2, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - })); - this._leaseId = proposedLeaseId2; - return response; + this._syncMarker = await AvroParser_js_1.AvroParser.readFixedBytes(this._headerStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal }); - } - /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param options - option to configure lease management operations. 
- * @returns Response data for release lease operation. - */ - async releaseLease(options = {}) { - var _a, _b, _c, _d, _e; - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + const schema = JSON.parse(this._metadata[AvroConstants_js_1.AVRO_SCHEMA_KEY]); + this._itemType = AvroParser_js_1.AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; } - return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { - var _a2; - return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - })); + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal }); + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } } - /** - * To renew the lease. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. - */ - async renewLease(options = {}) { - var _a, _b, _c, _d, _e; - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + async *parseObjects(options = {}) { + if (!this._initialized) { + await this.initialize(options); } - return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { - var _a2; - return this._containerOrBlobOperation.renewLease(this._leaseId, { - abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions }), - tracingOptions: updatedOptions.tracingOptions + while (this.hasNext()) { + const result2 = await this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal }); - }); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = await AvroParser_js_1.AvroParser.readFixedBytes(this._dataStream, AvroConstants_js_1.AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!(0, utils_common_js_1.arraysEqual)(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = await AvroParser_js_1.AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }); + } catch { + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + await AvroParser_js_1.AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + } + } + yield result2; + } } - /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob - * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. - * @returns Response data for break lease operation. - */ - async breakLease(breakPeriod2, options = {}) { - var _a, _b, _c, _d, _e; - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + }; + exports2.AvroReader = AvroReader; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js +var require_AvroReadable = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadable.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadable = void 0; + var AvroReadable = class { + }; + exports2.AvroReadable = AvroReadable; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js +var require_AvroReadableFromStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/AvroReadableFromStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadableFromStream = void 0; + var AvroReadable_js_1 = require_AvroReadable(); + var abort_controller_1 = require_commonjs3(); + var buffer_1 = require("buffer"); + var ABORT_ERROR = new abort_controller_1.AbortError("Reading from the avro stream was aborted."); + var AvroReadableFromStream = class extends AvroReadable_js_1.AvroReadable { + _position; + _readable; + toUint8Array(data) { + if (typeof data === "string") { + return buffer_1.Buffer.from(data); + } + return data; + } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + if (options.abortSignal?.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + return this.toUint8Array(chunk); + } else { + return new Promise((resolve, reject) => { + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + }); } - return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { - var _a2; - const operationOptions = { - abortSignal: options.abortSignal, - breakPeriod: breakPeriod2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? 
void 0 : _a2.tagConditions }), - tracingOptions: updatedOptions.tracingOptions - }; - return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); - }); } }; - var RetriableReadableStream = class extends stream.Readable { + exports2.AvroReadableFromStream = AvroReadableFromStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js +var require_internal_avro = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/internal-avro/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AvroReadableFromStream = exports2.AvroReadable = exports2.AvroReader = void 0; + var AvroReader_js_1 = require_AvroReader(); + Object.defineProperty(exports2, "AvroReader", { enumerable: true, get: function() { + return AvroReader_js_1.AvroReader; + } }); + var AvroReadable_js_1 = require_AvroReadable(); + Object.defineProperty(exports2, "AvroReadable", { enumerable: true, get: function() { + return AvroReadable_js_1.AvroReadable; + } }); + var AvroReadableFromStream_js_1 = require_AvroReadableFromStream(); + Object.defineProperty(exports2, "AvroReadableFromStream", { enumerable: true, get: function() { + return AvroReadableFromStream_js_1.AvroReadableFromStream; + } }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js +var require_BlobQuickQueryStream = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/BlobQuickQueryStream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobQuickQueryStream = void 0; + var node_stream_1 = require("node:stream"); + var index_js_1 = require_internal_avro(); + var BlobQuickQueryStream = class extends node_stream_1.Readable { + source; + avroReader; + avroIter; + avroPaused = true; + onProgress; + onError; /** - * Creates an instance of RetriableReadableStream. + * Creates an instance of BlobQuickQueryStream. 
* * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read * @param options - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = void 0; - this.source.pause(); - this.sourceErrorOrEndHandler(); - this.source.destroy(); - return; - } - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); - } - if (!this.push(data)) { - this.source.pause(); - } - }; - this.sourceAbortedHandler = () => { - const abortError = new abortController.AbortError("The operation was aborted."); - this.destroy(abortError); - }; - this.sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; - } - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } else if (this.offset <= this.end) { - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset).then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }).catch((error2) => { - this.destroy(error2); - }); - } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); - } - }; - this.getter = getter; + constructor(source, options = {}) { + super(); this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); - } - _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - this.source.on("aborted", this.sourceAbortedHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); - this.source.removeListener("aborted", this.sourceAbortedHandler); + this.onError = options.onError; + this.avroReader = new index_js_1.AvroReader(new index_js_1.AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } - _destroy(error2, callback) { - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error2 === null ? 
void 0 : error2); + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); } }; - var BlobDownloadResponse = class { + exports2.BlobQuickQueryStream = BlobQuickQueryStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js +var require_BlobQueryResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobQueryResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobQueryResponse = void 0; + var core_util_1 = require_commonjs4(); + var BlobQuickQueryStream_js_1 = require_BlobQuickQueryStream(); + var BlobQueryResponse = class { /** * Indicates that the service supports * requests for partial file content. @@ -50900,7 +54162,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @readonly */ get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; + return void 0; } /** * String identifier for the last attempted Copy @@ -51007,14 +54269,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get etag() { return this.originalResponse.etag; } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } /** * The error code. 
* @@ -51057,23 +54311,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get lastModified() { return this.originalResponse.lastModified; } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * Returns the date and time the blob was created. - * - * @readonly - */ - get createdOn() { - return this.originalResponse.createdOn; - } /** * A name-value pair * to associate with a file storage object. @@ -51102,7 +54339,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.originalResponse.clientRequestId; } /** - * Indicates the version of the Blob service used + * Indicates the version of the File service used * to execute the request. * * @readonly @@ -51110,22 +54347,6 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get version() { return this.originalResponse.version; } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. @@ -51144,73 +54365,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get contentCrc64() { return this.originalResponse.contentCrc64; } - /** - * Object Replication Policy Id of the destination blob. - * - * @readonly - */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; - } - /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. - * - * @readonly - */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; - } - /** - * If this blob has been sealed. - * - * @readonly - */ - get isSealed() { - return this.originalResponse.isSealed; - } - /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. - * - * @readonly - */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; - } - /** - * Indicates immutability policy mode. - * - * @readonly - */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; - } - /** - * Indicates if a legal hold is present on the blob. - * - * @readonly - */ - get legalHold() { - return this.originalResponse.legalHold; - } /** * The response body as a browser Blob. * Always undefined in node.js. * * @readonly */ - get contentAsBlob() { - return this.originalResponse.blobBody; + get blobBody() { + return void 0; } /** * The response body as a node.js Readable stream. * Always undefined in the browser. * - * It will automatically retry when internal read stream unexpected ends. + * It will parse avor data returned by blob query. * * @readonly */ get readableStreamBody() { - return coreUtil.isNode ? this.blobDownloadStream : void 0; + return core_util_1.isNodeLike ? this.blobDownloadStream : void 0; } /** * The HTTP response. 
@@ -51218,1032 +54391,1289 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; get _response() { return this.originalResponse._response; } + originalResponse; + blobDownloadStream; /** - * Creates an instance of BlobDownloadResponse. + * Creates an instance of BlobQueryResponse. * * @param originalResponse - - * @param getter - - * @param offset - - * @param count - * @param options - */ - constructor(originalResponse, getter, offset, count, options = {}) { + constructor(originalResponse, options = {}) { this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + this.blobDownloadStream = new BlobQuickQueryStream_js_1.BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } }; - var AVRO_SYNC_MARKER_SIZE = 16; - var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); - var AVRO_CODEC_KEY = "avro.codec"; - var AVRO_SCHEMA_KEY = "avro.schema"; - var AvroParser = class _AvroParser { - /** - * Reads a fixed number of bytes from the stream. - * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream2, length, options = {}) { - const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); - if (bytes.length !== length) { - throw new Error("Hit stream end."); - } - return bytes; + exports2.BlobQueryResponse = BlobQueryResponse; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/models.js +var require_models2 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/models.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = void 0; + exports2.toAccessTier = toAccessTier; + exports2.ensureCpkIfSpecified = ensureCpkIfSpecified; + exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; + var constants_js_1 = require_constants9(); + var BlockBlobTier; + (function(BlockBlobTier2) { + BlockBlobTier2["Hot"] = "Hot"; + BlockBlobTier2["Cool"] = "Cool"; + BlockBlobTier2["Cold"] = "Cold"; + BlockBlobTier2["Archive"] = "Archive"; + })(BlockBlobTier || (exports2.BlockBlobTier = BlockBlobTier = {})); + var PremiumPageBlobTier; + (function(PremiumPageBlobTier2) { + PremiumPageBlobTier2["P4"] = "P4"; + PremiumPageBlobTier2["P6"] = "P6"; + PremiumPageBlobTier2["P10"] = "P10"; + PremiumPageBlobTier2["P15"] = "P15"; + PremiumPageBlobTier2["P20"] = "P20"; + PremiumPageBlobTier2["P30"] = "P30"; + PremiumPageBlobTier2["P40"] = "P40"; + PremiumPageBlobTier2["P50"] = "P50"; + PremiumPageBlobTier2["P60"] = "P60"; + PremiumPageBlobTier2["P70"] = "P70"; + PremiumPageBlobTier2["P80"] = "P80"; + })(PremiumPageBlobTier || (exports2.PremiumPageBlobTier = PremiumPageBlobTier = {})); + function toAccessTier(tier) { + if (tier === void 0) { + return void 0; } - /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - - */ - static async readByte(stream2, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream2, 1, options); - return buf[0]; + return tier; + } + function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); } - // int and long are stored in variable-length zig-zag coding. 
- // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream2, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await _AvroParser.readByte(stream2, options); - haveMoreByte = byte & 128; - zigZagEncoded |= (byte & 127) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); - if (haveMoreByte) { - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; - do { - byte = await _AvroParser.readByte(stream2, options); - zigZagEncoded += (byte & 127) * significanceInFloat; - significanceInFloat *= 128; - } while (byte & 128); - const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = constants_js_1.EncryptionAlgorithmAES25; + } + } + var StorageBlobAudience; + (function(StorageBlobAudience2) { + StorageBlobAudience2["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + StorageBlobAudience2["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; + })(StorageBlobAudience || (exports2.StorageBlobAudience = StorageBlobAudience = {})); + function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; + } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js +var require_PageBlobRangeResponse = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/PageBlobRangeResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeResponseFromModel = rangeResponseFromModel; + function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + return { + ...response, + pageRange, + clearRange, + _response: { + ...response._response, + parsedBody: { + pageRange, + clearRange } - return res; } - return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); - } - static async readLong(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + }; + } + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/logger.js +var require_logger2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = void 0; + var logger_1 = require_commonjs2(); + exports2.logger = (0, logger_1.createClientLogger)("core-lro"); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js +var require_constants11 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.terminalStates = exports2.POLL_INTERVAL_IN_MS = void 0; + exports2.POLL_INTERVAL_IN_MS = 2e3; + exports2.terminalStates = ["succeeded", "canceled", "failed"]; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js +var 
require_operation = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollOperation = exports2.initOperation = exports2.deserializeState = void 0; + var logger_js_1 = require_logger2(); + var constants_js_1 = require_constants11(); + function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } catch (e) { + throw new Error(`Unable to deserialize input state: ${serializedState}`); } - static async readInt(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); + } + exports2.deserializeState = deserializeState; + function setStateError(inputs) { + const { state, stateProxy, isOperationError } = inputs; + return (error2) => { + if (isOperationError(error2)) { + stateProxy.setError(state, error2); + stateProxy.setFailed(state); + } + throw error2; + }; + } + function appendReadableErrorMessage(currentMessage, innerMessage) { + let message = currentMessage; + if (message.slice(-1) !== ".") { + message = message + "."; } - static async readNull() { - return null; + return message + " " + innerMessage; + } + function simplifyError(err) { + let message = err.message; + let code = err.code; + let curErr = err; + while (curErr.innererror) { + curErr = curErr.innererror; + code = curErr.code; + message = appendReadableErrorMessage(message, curErr.message); } - static async readBoolean(stream2, options = {}) { - const b = await _AvroParser.readByte(stream2, options); - if (b === 1) { - return true; - } else if (b === 0) { - return false; - } else { - throw new Error("Byte was not a boolean."); + return { + code, + message + }; + } + function processOperationStatus(result2) { + const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result2; + switch (status) { + case "succeeded": { + stateProxy.setSucceeded(state); + break; + } + case "failed": { + const err = getError === null || getError === void 0 ? void 0 : getError(response); + let postfix = ""; + if (err) { + const { code, message } = simplifyError(err); + postfix = `. ${code}. ${message}`; + } + const errStr = `The long-running operation has failed${postfix}`; + stateProxy.setError(state, new Error(errStr)); + stateProxy.setFailed(state); + logger_js_1.logger.warning(errStr); + break; + } + case "canceled": { + stateProxy.setCanceled(state); + break; } } - static async readFloat(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) { + stateProxy.setResult(state, buildResult({ + response, + state, + processResult + })); } - static async readDouble(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); + } + function buildResult(inputs) { + const { processResult, response, state } = inputs; + return processResult ? 
processResult(response, state) : response; + } + async function initOperation(inputs) { + const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult } = inputs; + const { operationLocation, resourceLocation, metadata, response } = await init(); + if (operationLocation) + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + const config = { + metadata, + operationLocation, + resourceLocation + }; + logger_js_1.logger.verbose(`LRO: Operation description:`, config); + const state = stateProxy.initState(config); + const status = getOperationStatus({ response, state, operationLocation }); + processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); + return state; + } + exports2.initOperation = initOperation; + async function pollOperationHelper(inputs) { + const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options } = inputs; + const response = await poll(operationLocation, options).catch(setStateError({ + state, + stateProxy, + isOperationError + })); + const status = getOperationStatus(response, state); + logger_js_1.logger.verbose(`LRO: Status: + Polling from: ${state.config.operationLocation} + Operation status: ${status} + Polling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); + if (status === "succeeded") { + const resourceLocation = getResourceLocation(response, state); + if (resourceLocation !== void 0) { + return { + response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), + status + }; + } } - static async readBytes(stream2, options = {}) { - const size = await _AvroParser.readLong(stream2, options); - if (size < 0) { - throw new Error("Bytes size was negative."); + return { response, status }; + } + async function pollOperation(inputs) { + const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; + const { operationLocation } = state.config; + if (operationLocation !== void 0) { + const { response, status } = await pollOperationHelper({ + poll, + getOperationStatus, + state, + stateProxy, + operationLocation, + getResourceLocation, + isOperationError, + options + }); + processOperationStatus({ + status, + response, + state, + stateProxy, + isDone, + processResult, + getError, + setErrorAsResult + }); + if (!constants_js_1.terminalStates.includes(status)) { + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); + if (intervalInMs) + setDelay(intervalInMs); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); + if (location !== void 0) { + const isUpdated = operationLocation !== location; + state.config.operationLocation = location; + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); + } else + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); + } + updateState === null || updateState === void 0 ? 
void 0 : updateState(state, response); + } + } + exports2.pollOperation = pollOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js +var require_operation2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.pollHttpOperation = exports2.isOperationError = exports2.getResourceLocation = exports2.getOperationStatus = exports2.getOperationLocation = exports2.initHttpOperation = exports2.getStatusFromInitialResponse = exports2.getErrorFromResponse = exports2.parseRetryAfter = exports2.inferLroMode = void 0; + var operation_js_1 = require_operation(); + var logger_js_1 = require_logger2(); + function getOperationLocationPollingUrl(inputs) { + const { azureAsyncOperation, operationLocation } = inputs; + return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; + } + function getLocationHeader(rawResponse) { + return rawResponse.headers["location"]; + } + function getOperationLocationHeader(rawResponse) { + return rawResponse.headers["operation-location"]; + } + function getAzureAsyncOperationHeader(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; + } + function findResourceLocation(inputs) { + var _a; + const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "DELETE": { + return void 0; + } + case "PATCH": { + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; } - return stream2.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream2, options = {}) { - const u8arr = await _AvroParser.readBytes(stream2, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream2, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream2, options); - const value = await readItemMethod(stream2, options); - return { key, value }; - } - static async readMap(stream2, readItemMethod, options = {}) { - const readPairMethod = (s, opts = {}) => { - return _AvroParser.readMapPair(s, readItemMethod, opts); - }; - const pairs = await _AvroParser.readArray(stream2, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; + default: { + return getDefault(); } - return dict; } - static async readArray(stream2, readItemMethod, options = {}) { - const items = []; - for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { - if (count < 0) { - await _AvroParser.readLong(stream2, options); - count = -count; + function getDefault() { + switch (resourceLocationConfig) { + case "azure-async-operation": { + return void 0; } - while (count--) { - const item = await readItemMethod(stream2, options); - items.push(item); + case "original-uri": { + return requestPath; + } + case "location": + default: { + return location; } } - return items; - } - }; - var AvroComplex; - (function(AvroComplex2) { - AvroComplex2["RECORD"] = "record"; - AvroComplex2["ENUM"] = "enum"; - AvroComplex2["ARRAY"] = "array"; - AvroComplex2["MAP"] = "map"; - AvroComplex2["UNION"] = "union"; - AvroComplex2["FIXED"] = "fixed"; - })(AvroComplex || (AvroComplex = {})); - var AvroPrimitive; - (function(AvroPrimitive2) { - AvroPrimitive2["NULL"] = "null"; - 
AvroPrimitive2["BOOLEAN"] = "boolean"; - AvroPrimitive2["INT"] = "int"; - AvroPrimitive2["LONG"] = "long"; - AvroPrimitive2["FLOAT"] = "float"; - AvroPrimitive2["DOUBLE"] = "double"; - AvroPrimitive2["BYTES"] = "bytes"; - AvroPrimitive2["STRING"] = "string"; - })(AvroPrimitive || (AvroPrimitive = {})); - var AvroType = class _AvroType { - /** - * Determines the AvroType from the Avro Schema. - */ - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - static fromSchema(schema) { - if (typeof schema === "string") { - return _AvroType.fromStringSchema(schema); - } else if (Array.isArray(schema)) { - return _AvroType.fromArraySchema(schema); - } else { - return _AvroType.fromObjectSchema(schema); - } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); - } - } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(_AvroType.fromSchema)); } - static fromObjectSchema(schema) { - const type = schema.type; - try { - return _AvroType.fromStringSchema(type); - } catch (_a) { - } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = _AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); - } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); - } - return new AvroMapType(_AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: - // Unused today - case AvroComplex.FIXED: - // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); - } + } + function inferLroMode(inputs) { + const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; + const operationLocation = getOperationLocationHeader(rawResponse); + const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); + const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); + const location = getLocationHeader(rawResponse); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? 
void 0 : requestMethod.toLocaleUpperCase(); + if (pollingUrl !== void 0) { + return { + mode: "OperationLocation", + operationLocation: pollingUrl, + resourceLocation: findResourceLocation({ + requestMethod: normalizedRequestMethod, + location, + requestPath, + resourceLocationConfig + }) + }; + } else if (location !== void 0) { + return { + mode: "ResourceLocation", + operationLocation: location + }; + } else if (normalizedRequestMethod === "PUT" && requestPath) { + return { + mode: "Body", + operationLocation: requestPath + }; + } else { + return void 0; } - }; - var AvroPrimitiveType = class extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; + } + exports2.inferLroMode = inferLroMode; + function transformStatus(inputs) { + const { status, statusCode } = inputs; + if (typeof status !== "string" && status !== void 0) { + throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream2, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream2, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream2, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream2, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream2, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream2, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream2, options); - default: - throw new Error("Unknown Avro Primitive"); + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { + case void 0: + return toOperationStatus(statusCode); + case "succeeded": + return "succeeded"; + case "failed": + return "failed"; + case "running": + case "accepted": + case "started": + case "canceling": + case "cancelling": + return "running"; + case "canceled": + case "cancelled": + return "canceled"; + default: { + logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); + return status; } } - }; - var AvroEnumType = class extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; - } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { - const value = await AvroParser.readInt(stream2, options); - return this._symbols[value]; - } - }; - var AvroUnionType = class extends AvroType { - constructor(types) { - super(); - this._types = types; + } + function getStatus(rawResponse) { + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? 
_b : provisioningState; + return transformStatus({ status, statusCode: rawResponse.statusCode }); + } + function toOperationStatus(statusCode) { + if (statusCode === 202) { + return "running"; + } else if (statusCode < 300) { + return "succeeded"; + } else { + return "failed"; } - async read(stream2, options = {}) { - const typeIndex = await AvroParser.readInt(stream2, options); - return this._types[typeIndex].read(stream2, options); + } + function parseRetryAfter({ rawResponse }) { + const retryAfter = rawResponse.headers["retry-after"]; + if (retryAfter !== void 0) { + const retryAfterInSeconds = parseInt(retryAfter); + return isNaN(retryAfterInSeconds) ? calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; } - }; - var AvroMapType = class extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; + return void 0; + } + exports2.parseRetryAfter = parseRetryAfter; + function getErrorFromResponse(response) { + const error2 = accessBodyProperty(response, "error"); + if (!error2) { + logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); + return; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - read(stream2, options = {}) { - const readItemMethod = (s, opts) => { - return this._itemType.read(s, opts); - }; - return AvroParser.readMap(stream2, readItemMethod, options); + if (!error2.code || !error2.message) { + logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); + return; } - }; - var AvroRecordType = class extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; + return error2; + } + exports2.getErrorFromResponse = getErrorFromResponse; + function calculatePollingIntervalFromDate(retryAfterDate) { + const timeNow = Math.floor((/* @__PURE__ */ new Date()).getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; } - // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types - async read(stream2, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream2, options); - } + return void 0; + } + function getStatusFromInitialResponse(inputs) { + const { response, state, operationLocation } = inputs; + function helper() { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case void 0: + return toOperationStatus(response.rawResponse.statusCode); + case "Body": + return getOperationStatus(response, state); + default: + return "running"; } - return record; - } - }; - function arraysEqual(a, b) { - if (a === b) - return true; - if (a == null || b == null) - return false; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; } - return true; + const status = helper(); + return status === "running" && operationLocation === void 0 ? 
"succeeded" : status; } - var AvroReader = class { - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; - } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); + exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; + async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + stateProxy, + processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult + }); + } + exports2.initHttpOperation = initHttpOperation; + function getOperationLocation({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) + }); } - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal - }); - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec === void 0 || codec === null || codec === "null")) { - throw new Error("Codecs are not supported"); + case "ResourceLocation": { + return getLocationHeader(rawResponse); } - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - }); - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset === 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; + case "Body": + default: { + return void 0; } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - }); - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } + } + } + exports2.getOperationLocation = getOperationLocation; + function getOperationStatus({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; + } + exports2.getOperationStatus = getOperationStatus; + function accessBodyProperty({ flatResponse, rawResponse }, prop) { + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? 
void 0 : _b[prop]; + } + function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; } - parseObjects() { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); - } - while (this.hasNext()) { - const result2 = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker2 = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker2)) { - throw new Error("Stream is not a valid Avro file."); + return state.config.resourceLocation; + } + exports2.getResourceLocation = getResourceLocation; + function isOperationError(e) { + return e.name === "RestError"; + } + exports2.isOperationError = isOperationError; + async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult + }); + } + exports2.pollHttpOperation = pollHttpOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js +var require_poller = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildCreatePoller = void 0; + var operation_js_1 = require_operation(); + var constants_js_1 = require_constants11(); + var core_util_1 = require_commonjs4(); + var createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. 
+ */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => state.status = "canceled", + setError: (state, error2) => state.error = error2, + setResult: (state, result2) => state.result = result2, + setRunning: (state) => state.status = "running", + setSucceeded: (state) => state.status = "succeeded", + setFailed: (state) => state.status = "failed", + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded" + }); + function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() : void 0; + const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = /* @__PURE__ */ new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === void 0, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state + }), + onProgress: (callback) => { + const s = Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - })); - } catch (_a) { - this._itemsRemainingInBlock = 0; + } finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = void 0; + }), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; } - if (this._itemsRemainingInBlock > 0) { - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; } } - yield yield tslib.__await(result2); } - }); - } - }; - var AvroReadable = class { - }; - var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); - var AvroReadableFromStream = class extends AvroReadable { - toUint8Array(data) { - if (typeof data === "string") { - return Buffer.from(data); - } - return data; - } - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; + }; + return poller; + }; + } + exports2.buildCreatePoller = buildCreatePoller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js +var require_poller2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var operation_js_1 = require_operation2(); + var poller_js_1 = require_poller(); + async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: 
operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + poll: lro.sendPollRequest + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse + }); + } + exports2.createHttpPoller = createHttpPoller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js +var require_operation3 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.GenericPollOperation = void 0; + var operation_js_1 = require_operation2(); + var logger_js_1 = require_logger2(); + var createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => state.isCancelled = true, + setError: (state, error2) => state.error = error2, + setResult: (state, result2) => state.result = result2, + setRunning: (state) => state.isStarted = true, + setSucceeded: (state) => state.isCompleted = true, + setFailed: () => { + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) + }); + var GenericPollOperation = class { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; } - get position() { - return this._position; + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; } - async read(size, options = {}) { + async update(options) { var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { - throw ABORT_ERROR; - } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); - } - if (size === 0) { - return new Uint8Array(); - } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult + })); } - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - return this.toUint8Array(chunk); - } else { - return new Promise((resolve, reject) => { - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const callbackChunk = this._readable.read(size); - if (callbackChunk) { - this._position += callbackChunk.length; - cleanUp(); - resolve(this.toUint8Array(callbackChunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === void 0) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, + isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult }); } + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); + return this; + } + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; } - }; - var BlobQuickQueryStream = class extends stream.Readable { /** - * Creates an instance of BlobQuickQueryStream. - * - * @param source - The current ReadableStream returned from getter - * @param options - + * Serializes the Poller operation. 
*/ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); - }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - this.onError({ - position, - name, - isFatal: fatal, - description - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); - } - } while (!avroNext.done && !this.avroPaused); + toString() { + return JSON.stringify({ + state: this.state + }); } }; - var BlobQueryResponse = class { - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; + exports2.GenericPollOperation = GenericPollOperation; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js +var require_poller3 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; + var PollerStoppedError = class _PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, _PollerStoppedError.prototype); } - /** - * Returns if it was previously specified - * for the file. 
- * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; + }; + exports2.PollerStoppedError = PollerStoppedError; + var PollerCancelledError = class _PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, _PollerCancelledError.prototype); } + }; + exports2.PollerCancelledError = PollerCancelledError; + var Poller = class { /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. + * const operation = { + * state, + * update, + * cancel, + * toString + * } * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * // Sending the operation to the parent's constructor. + * super(operation); * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. + * // You can assign more local properties here. + * } + * } + * ``` * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. 
* - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` * - * @readonly + * @param operation - Must contain the basic properties of `PollOperation`. */ - get contentType() { - return this.originalResponse.contentType; + constructor(operation) { + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + this.promise.catch(() => { + }); } /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. */ - get copyCompletedOn() { - return void 0; + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } } /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly + * @param options - Optional properties passed to the operation's update method. */ - get copyProgress() { - return this.originalResponse.copyProgress; + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this) + }); + } + this.processUpdatedState(); } /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * fireProgress calls the functions passed in via onProgress the method of the poller. * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. * - * @readonly + * @param state - The current operation state. */ - get copyStatus() { - return this.originalResponse.copyStatus; + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } } /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly + * Invokes the underlying operation's cancel method. */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly + * @param options - Optional properties passed to the operation's update method. */ - get leaseState() { - return this.originalResponse.leaseState; + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = void 0; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error2 = new PollerCancelledError("Operation was canceled"); + this.reject(error2); + throw error2; + } + } + if (this.isDone() && this.resolve) { + this.resolve(this.getResult()); + } } /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly + * Returns a promise that will resolve once the underlying operation is completed. */ - get date() { - return this.originalResponse.date; + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + this.processUpdatedState(); + return this.promise; } /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. * - * @readonly + * It returns a method that can be used to stop receiving updates on the given callback function. 
*/ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; } /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly + * Returns true if the poller has finished polling. */ - get etag() { - return this.originalResponse.etag; + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); } /** - * The error code. - * - * @readonly + * Stops the poller from continuing to poll. */ - get errorCode() { - return this.originalResponse.errorCode; + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } } /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). - * - * @readonly + * Returns true if the poller is stopped. */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; + isStopped() { + return this.stopped; } /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. + * Attempts to cancel the underlying operation. * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * A name-value pair - * to associate with a file storage object. + * If it's called again before it finishes, it will throw an error. * - * @readonly + * @param options - Optional properties passed to the operation's update method. */ - get metadata() { - return this.originalResponse.metadata; + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; } /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. + * Returns the state of the operation. * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; - } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. 
+ * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the File service used - * to execute the request. + * Example: * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } * - * @readonly - */ - get blobBody() { - return void 0; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... * - * It will parse avor data returned by blob query. + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, * - * @readonly + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. */ - get readableStreamBody() { - return coreUtil.isNode ? this.blobDownloadStream : void 0; + getOperationState() { + return this.operation.state; } /** - * The HTTP response. + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. */ - get _response() { - return this.originalResponse._response; + getResult() { + const state = this.operation.state; + return state.result; } /** - * Creates an instance of BlobQueryResponse. 
- * - * @param originalResponse - - * @param options - + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } - }; - exports2.BlockBlobTier = void 0; - (function(BlockBlobTier) { - BlockBlobTier["Hot"] = "Hot"; - BlockBlobTier["Cool"] = "Cool"; - BlockBlobTier["Cold"] = "Cold"; - BlockBlobTier["Archive"] = "Archive"; - })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); - exports2.PremiumPageBlobTier = void 0; - (function(PremiumPageBlobTier) { - PremiumPageBlobTier["P4"] = "P4"; - PremiumPageBlobTier["P6"] = "P6"; - PremiumPageBlobTier["P10"] = "P10"; - PremiumPageBlobTier["P15"] = "P15"; - PremiumPageBlobTier["P20"] = "P20"; - PremiumPageBlobTier["P30"] = "P30"; - PremiumPageBlobTier["P40"] = "P40"; - PremiumPageBlobTier["P50"] = "P50"; - PremiumPageBlobTier["P60"] = "P60"; - PremiumPageBlobTier["P70"] = "P70"; - PremiumPageBlobTier["P80"] = "P80"; - })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); - function toAccessTier(tier2) { - if (tier2 === void 0) { - return void 0; + toString() { + return this.operation.toString(); } - return tier2; - } - function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + }; + exports2.Poller = Poller; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js +var require_lroEngine = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var operation_js_1 = require_operation3(); + var constants_js_1 = require_constants11(); + var poller_js_1 = require_poller3(); + var operation_js_2 = require_operation(); + var LroEngine = class extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; + const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs }; + operation.setPollerConfig(this.config); } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); } - } - exports2.StorageBlobAudience = void 0; - (function(StorageBlobAudience) { - StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; - StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; - })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); - function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; - } - function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { - pageRange, - clearRange, - _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange - } }) - }); - } - var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { + }; + exports2.LroEngine = LroEngine; + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js +var require_lroEngine2 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var lroEngine_js_1 = require_lroEngine(); + Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { + return lroEngine_js_1.LroEngine; + } }); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js +var require_pollOperation = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/core-lro/dist/commonjs/index.js +var require_commonjs12 = __commonJS({ + "../../node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var poller_js_1 = require_poller2(); + Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { + return poller_js_1.createHttpPoller; + } }); + tslib_1.__exportStar(require_lroEngine2(), exports2); + tslib_1.__exportStar(require_poller3(), exports2); + tslib_1.__exportStar(require_pollOperation(), exports2); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js +var require_BlobStartCopyFromUrlPoller = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBeginCopyFromUrlPoller = void 0; + var core_util_1 = require_commonjs4(); + var core_lro_1 = require_commonjs12(); + var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { + intervalInMs; constructor(options) { - const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } 
= options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { + const operation = makeBlobBeginCopyFromURLPollOperation({ + ...state, blobClient, - copySource: copySource2, + copySource, startCopyFromURLOptions - })); + }); super(operation); if (typeof onProgress === "function") { this.onProgress(onProgress); @@ -52251,20 +55681,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.intervalInMs = intervalInMs; } delay() { - return coreUtil.delay(this.intervalInMs); + return (0, core_util_1.delay)(this.intervalInMs); } }; + exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; var cancel = async function cancel2(options = {}) { const state = this.state; - const { copyId: copyId2 } = state; + const { copyId } = state; if (state.isCompleted) { return makeBlobBeginCopyFromURLPollOperation(state); } - if (!copyId2) { + if (!copyId) { state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); } - await state.blobClient.abortCopyFromURL(copyId2, { + await state.blobClient.abortCopyFromURL(copyId, { abortSignal: options.abortSignal }); state.isCancelled = true; @@ -52272,10 +55703,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; var update = async function update2(options = {}) { const state = this.state; - const { blobClient, copySource: copySource2, startCopyFromURLOptions } = state; + const { blobClient, copySource, startCopyFromURLOptions } = state; if (!state.isStarted) { state.isStarted = true; - const result2 = await blobClient.startCopyFromURL(copySource2, startCopyFromURLOptions); + const result2 = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); state.copyId = result2.copyId; if (result2.copyStatus === "success") { state.result = result2; @@ -52315,12 +55746,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; function makeBlobBeginCopyFromURLPollOperation(state) { return { - state: Object.assign({}, state), + state: { ...state }, cancel, toString, update }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Range.js +var require_Range = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeToString = rangeToString; function rangeToString(iRange) { if (iRange.offset < 0) { throw new RangeError(`Range.offset cannot be smaller than 0.`); @@ -52330,27 +55770,61 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js +var require_Batch = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Batch = void 0; + var events_1 = require("events"); var BatchStates; (function(BatchStates2) { BatchStates2[BatchStates2["Good"] = 0] = "Good"; BatchStates2[BatchStates2["Error"] = 1] = "Error"; })(BatchStates || (BatchStates = {})); var Batch = class { + /** + * Concurrency. Must be lager than 0. + */ + concurrency; + /** + * Number of active operations under execution. + */ + actives = 0; + /** + * Number of completed operations under execution. + */ + completed = 0; + /** + * Offset of next operation to be executed. 
+ */ + offset = 0; + /** + * Operation array to be executed. + */ + operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + state = BatchStates.Good; + /** + * A private emitter used to pass events inside this class. + */ + emitter; /** * Creates an instance of Batch. * @param concurrency - */ constructor(concurrency = 5) { - this.actives = 0; - this.completed = 0; - this.offset = 0; - this.operations = []; - this.state = BatchStates.Good; if (concurrency < 1) { throw new RangeError("concurrency must be larger than 0"); } this.concurrency = concurrency; - this.emitter = new events.EventEmitter(); + this.emitter = new events_1.EventEmitter(); } /** * Add a operation into queue. @@ -52420,333 +55894,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BuffersStream = class extends stream.Readable { - /** - * Creates an instance of BuffersStream that will emit the data - * contained in the array of buffers. - * - * @param buffers - Array of buffers containing the data - * @param byteLength - The total length of data contained in the buffers - */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; - let buffersLength = 0; - for (const buf of this.buffers) { - buffersLength += buf.byteLength; - } - if (buffersLength < this.byteLength) { - throw new Error("Data size shouldn't be larger than the total length of buffers."); - } - } - /** - * Internal _read() that will be called when the stream wants to pull more data in. - * - * @param size - Optional. The size of data to be read - */ - _read(size) { - if (this.pushedBytesLength >= this.byteLength) { - this.push(null); - } - if (!size) { - size = this.readableHighWaterMark; - } - const outBuffers = []; - let i = 0; - while (i < size && this.pushedBytesLength < this.byteLength) { - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); - if (remaining > size - i) { - const end = this.byteOffsetInCurrentBuffer + size - i; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - this.pushedBytesLength += size - i; - this.byteOffsetInCurrentBuffer = end; - i = size; - break; - } else { - const end = this.byteOffsetInCurrentBuffer + remaining; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - if (remaining === remainingCapacityInThisBuffer) { - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex++; - } else { - this.byteOffsetInCurrentBuffer = end; - } - this.pushedBytesLength += remaining; - i += remaining; - } - } - if (outBuffers.length > 1) { - this.push(Buffer.concat(outBuffers)); - } else if (outBuffers.length === 1) { - this.push(outBuffers[0]); - } - } - }; - var maxBufferLength = buffer.constants.MAX_LENGTH; - var PooledBuffer = class { - /** - * The size of the data contained in the pooled buffers. 
- */ - get size() { - return this._size; - } - constructor(capacity, buffers, totalLength) { - this.buffers = []; - this.capacity = capacity; - this._size = 0; - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; - if (len === 0) { - len = maxBufferLength; - } - this.buffers.push(Buffer.allocUnsafe(len)); - } - if (buffers) { - this.fill(buffers, totalLength); - } - } - /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. - * Data copied will be shift out of the input buffers. - * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. - * - */ - fill(buffers, totalLength) { - this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; - while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); - totalCopiedNum += copiedNum; - sourceOffset += copiedNum; - targetOffset += copiedNum; - if (sourceOffset === source.length) { - i++; - sourceOffset = 0; - } - if (targetOffset === target.length) { - j++; - targetOffset = 0; - } - } - buffers.splice(0, i); - if (buffers.length > 0) { - buffers[0] = buffers[0].slice(sourceOffset); - } - } - /** - * Get the readable stream assembled from all the data in the internal buffers. - * - */ - getReadableStream() { - return new BuffersStream(this.buffers, this.size); - } - }; - var BufferScheduler = class { - /** - * Creates an instance of BufferScheduler. - * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream - */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - this.emitter = new events.EventEmitter(); - this.offset = 0; - this.isStreamEnd = false; - this.isError = false; - this.executingOutgoingHandlers = 0; - this.numBuffers = 0; - this.unresolvedDataArray = []; - this.unresolvedLength = 0; - this.incoming = []; - this.outgoing = []; - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; - } - /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. - * - */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer2 = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer2.getReadableStream(), buffer2.size, this.offset).then(resolve).catch(reject); - } else if (this.unresolvedLength >= this.bufferSize) { - return; - } else { - resolve(); - } - } - }); - }); - } - /** - * Insert a new data into unresolved array. - * - * @param data - - */ - appendUnresolvedData(data) { - this.unresolvedDataArray.push(data); - this.unresolvedLength += data.length; - } - /** - * Try to shift a buffer with size in blockSize. The buffer returned may be less - * than blockSize when data in unresolvedDataArray is less than bufferSize. - * - */ - shiftBufferFromUnresolvedDataArray(buffer2) { - if (!buffer2) { - buffer2 = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); - } else { - buffer2.fill(this.unresolvedDataArray, this.unresolvedLength); - } - this.unresolvedLength -= buffer2.size; - return buffer2; - } - /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. - * - * @returns Return false when buffers in incoming are not enough, else true. - */ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer2; - if (this.incoming.length > 0) { - buffer2 = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer2); - } else { - if (this.numBuffers < this.maxBuffers) { - buffer2 = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } else { - return false; - } - } - this.outgoing.push(buffer2); - this.triggerOutgoingHandlers(); - } - return true; - } - /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. - */ - async triggerOutgoingHandlers() { - let buffer2; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer2 = this.outgoing.shift(); - if (buffer2) { - this.triggerOutgoingHandler(buffer2); - } - } while (buffer2); - } - /** - * Trigger a outgoing handler for a buffer shifted from outgoing. - * - * @param buffer - - */ - async triggerOutgoingHandler(buffer2) { - const bufferLength = buffer2.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; - try { - await this.outgoingHandler(() => buffer2.getReadableStream(), bufferLength, this.offset - bufferLength); - } catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer2); - this.emitter.emit("checkEnd"); - } - /** - * Return buffer used by outgoing handler into incoming. 
- * - * @param buffer - - */ - reuseBuffer(buffer2) { - this.incoming.push(buffer2); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); - } - } - }; - async function streamToBuffer(stream2, buffer2, offset, end, encoding) { + exports2.Batch = Batch; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js +var require_utils4 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.fsCreateReadStream = exports2.fsStat = void 0; + exports2.streamToBuffer = streamToBuffer; + exports2.streamToBuffer2 = streamToBuffer2; + exports2.streamToBuffer3 = streamToBuffer3; + exports2.readStreamToLocalFile = readStreamToLocalFile; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_fs_1 = tslib_1.__importDefault(require("node:fs")); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var constants_js_1 = require_constants9(); + async function streamToBuffer(stream, buffer, offset, end, encoding) { let pos = 0; const count = end - offset; return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream2.on("readable", () => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); + stream.on("readable", () => { if (pos >= count) { clearTimeout(timeout); resolve(); return; } - let chunk = stream2.read(); + let chunk = stream.read(); if (!chunk) { return; } @@ -52754,28 +55931,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; chunk = Buffer.from(chunk, encoding); } const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer2.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); - stream2.on("end", () => { + stream.on("end", () => { clearTimeout(timeout); if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } resolve(); }); - stream2.on("error", (msg) => { + stream.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } - async function streamToBuffer2(stream2, buffer2, encoding) { + async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; - const bufferSize = buffer2.length; + const bufferSize = buffer.length; return new Promise((resolve, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); + stream.on("readable", () => { + let chunk = stream.read(); if (!chunk) { return; } @@ -52786,18 +55963,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); return; } - buffer2.fill(chunk, pos, pos + chunk.length); + buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); - stream2.on("end", () => { + stream.on("end", () => { resolve(pos); }); - stream2.on("error", reject); + stream.on("error", reject); + }); + } + async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(typeof data === "string" ? 
Buffer.from(data, encoding) : data); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); }); } async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); + const ws = node_fs_1.default.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -52808,9 +55997,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; rs.pipe(ws); }); } - var fsStat = util__namespace.promisify(fs__namespace.stat); - var fsCreateReadStream = fs__namespace.createReadStream; - var BlobClient = class _BlobClient extends StorageClient { + exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); + exports2.fsCreateReadStream = node_fs_1.default.createReadStream; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/Clients.js +var require_Clients = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_auth_1 = require_commonjs7(); + var core_util_1 = require_commonjs4(); + var core_util_2 = require_commonjs4(); + var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); + var BlobQueryResponse_js_1 = require_BlobQueryResponse(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var models_js_1 = require_models2(); + var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); + var Pipeline_js_1 = require_Pipeline(); + var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); + var Range_js_1 = require_Range(); + var StorageClient_js_1 = require_StorageClient(); + var Batch_js_1 = require_Batch(); + var storage_common_1 = require_commonjs11(); + var constants_js_1 = require_constants9(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var utils_js_1 = require_utils4(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { + /** + * blobContext provided by protocol layer. + */ + blobContext; + _name; + _containerName; + _versionId; + _snapshot; /** * The name of the blob. 
*/ @@ -52826,49 +56054,49 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { options = options || {}; let pipeline; - let url2; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + let url; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; + url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); this.blobContext = this.storageClientContext.blob; - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); + this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. @@ -52877,8 +56105,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - withSnapshot(snapshot2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a new BlobClient object pointing to a version of this blob. @@ -52887,8 +56115,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. */ - withVersion(versionId2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? void 0 : versionId2), this.pipeline); + withVersion(versionId) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); } /** * Creates a AppendBlobClient object. @@ -52918,7 +56146,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * In Node.js, data returns in a Readable stream readableStreamBody * * In browsers, data returns in a promise blobBody * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob * * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. 
Will download to the end when undefined @@ -52927,76 +56155,106 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage (Node.js): * - * ```js - * // Download and convert a blob to a string + * ```ts snippet:ReadmeSampleDownloadBlob_Node + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); - * console.log("Downloaded blob content:", downloaded.toString()); + * if (downloadBlockBlobResponse.readableStreamBody) { + * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); + * console.log(`Downloaded blob content: ${downloaded}`); + * } * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(typeof data === "string" ? Buffer.from(data) : data); + * async function streamToString(stream: NodeJS.ReadableStream): Promise { + * const result = await new Promise>((resolve, reject) => { + * const chunks: Buffer[] = []; + * stream.on("data", (data) => { + * chunks.push(Buffer.isBuffer(data) ? 
data : Buffer.from(data)); * }); - * readableStream.on("end", () => { + * stream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); - * readableStream.on("error", reject); + * stream.on("error", reject); * }); + * return result.toString(); * } * ``` * * Example usage (browser): * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); - * console.log( - * "Downloaded blob content", - * downloaded + * ```ts snippet:ReadmeSampleDownloadBlob_Browser + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), * ); * - * async function blobToString(blob: Blob): Promise { - * const fileReader = new FileReader(); - * return new Promise((resolve, reject) => { - * fileReader.onloadend = (ev: any) => { - * resolve(ev.target!.result); - * }; - * fileReader.onerror = reject; - * fileReader.readAsText(blob); - * }); + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + * const downloadBlockBlobResponse = await blobClient.download(); + * const blobBody = await downloadBlockBlobResponse.blobBody; + * if (blobBody) { + * const downloaded = await blobBody.text(); + * console.log(`Downloaded blob content: ${downloaded}`); * } * ``` */ async download(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { - var _a; - const res = assertResponse(await this.blobContext.download({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { - onDownloadProgress: coreUtil.isNode ? void 0 : options.onProgress + onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress // for Node.js, progress is reported by RetriableReadableStream }, - range: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), + range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - if (!coreUtil.isNode) { + const wrappedRes = { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; + if (!core_util_1.isNodeLike) { return wrappedRes; } if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; } if (res.contentLength === void 0) { throw new RangeError(`File download response doesn't contain valid content length header`); @@ -53004,8 +56262,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!res.etag) { throw new RangeError(`File download response doesn't contain valid etag header`); } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a2; + return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { const updatedDownloadOptions = { leaseAccessConditions: options.conditions, modifiedAccessConditions: { @@ -53013,9 +56270,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions + ifTags: options.conditions?.tagConditions }, - range: rangeToString({ + range: (0, Range_js_1.rangeToString)({ count: offset + res.contentLength - start, offset: start }), @@ -53024,7 +56281,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }; - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + return (await this.blobContext.download({ + abortSignal: options.abortSignal, + ...updatedDownloadOptions + })).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress @@ -53041,9 +56301,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - options to Exists operation. 
*/ async exists(options = {}) { - return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); await this.getProperties({ abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, @@ -53054,7 +56314,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } catch (e) { if (e.statusCode === 404) { return false; - } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { return true; } throw e; @@ -53064,7 +56324,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by @@ -53075,17 +56335,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getProperties(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { - var _a; - const res = assertResponse(await this.blobContext.getProperties({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + return { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; }); } /** @@ -53093,19 +56361,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. 
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async delete(options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.delete({ + return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53115,19 +56385,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { - return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { - var _a, _b; + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { try { - const res = assertResponse(await this.delete(updatedOptions)); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -53137,13 +56415,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 * or later. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob * * @param options - Optional options to Blob Undelete operation. 
*/ async undelete(options = {}) { - return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.undelete({ + return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -53154,7 +56432,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * If no value provided, or no value provided for the specified blob HTTP headers, * these blob HTTP headers without a value will be cleared. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * * @param blobHTTPHeaders - If no value provided, or no value provided for * the specified blob HTTP headers, these blob HTTP @@ -53166,14 +56444,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.setHttpHeaders({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. tracingOptions: updatedOptions.tracingOptions })); @@ -53184,22 +56464,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * If no option provided, or no metadata defined in the parameter, the blob * metadata will be removed. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. 
*/ - async setMetadata(metadata2, options = {}) { + async setMetadata(metadata, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.setMetadata({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata: metadata2, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions @@ -53215,15 +56497,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param tags - * @param options - */ - async setTags(tags2, options = {}) { - return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.setTags({ + async setTags(tags, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions, - tags: toBlobTags(tags2) + tags: (0, utils_common_js_1.toBlobTags)(tags) })); }); } @@ -53233,15 +56517,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async getTags(options = {}) { - return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { - var _a; - const response = assertResponse(await this.blobContext.getTags({ + return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} + }; return wrappedResponse; }); } @@ -53252,24 +56543,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobLeaseClient object for managing leases on the blob. */ getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); } /** * Creates a read-only snapshot of a blob. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.createSnapshot({ + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions @@ -53290,57 +56583,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. 
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob * - * Example using automatic polling: + * ```ts snippet:ClientsBeginCopyFromURL + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using manual polling: + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); - * } - * const result = copyPoller.getResult(); - * ``` + * // Example using automatic polling + * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); + * const automaticResult = await automaticCopyPoller.pollUntilDone(); * - * Example using progress updates: + * // Example using manual polling + * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); + * while (!manualCopyPoller.isDone()) { + * await manualCopyPoller.poll(); + * } + * const manualResult = manualCopyPoller.getResult(); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { + * // Example using progress updates + * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { * onProgress(state) { * console.log(`Progress: ${state.copyProgress}`); - * } + * }, * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using a changing polling interval (default 15 seconds): + * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress + * // Example using a changing polling interval (default 15 seconds) + * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { + * intervalInMs: 1000, // poll blob every 1 second for copy progress * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using copy cancellation: + * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // Example using copy cancellation: + * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); * // cancel operation after starting it. * try { - * await copyPoller.cancelOperation(); + * await cancelCopyPoller.cancelOperation(); * // calls to get the result now throw PollerCancelledError - * await copyPoller.getResult(); - * } catch (err) { - * if (err.name === 'PollerCancelledError') { - * console.log('The copy was cancelled.'); + * cancelCopyPoller.getResult(); + * } catch (err: any) { + * if (err.name === "PollerCancelledError") { + * console.log("The copy was cancelled."); * } * } * ``` @@ -53348,15 +56642,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. 
*/ - async beginCopyFromURL(copySource2, options = {}) { + async beginCopyFromURL(copySource, options = {}) { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), startCopyFromURL: (...args) => this.startCopyFromURL(...args) }; - const poller = new BlobBeginCopyFromUrlPoller({ + const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ blobClient: client, - copySource: copySource2, + copySource, intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, @@ -53368,14 +56662,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob * * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. */ - async abortCopyFromURL(copyId2, options = {}) { - return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.abortCopyFromURL(copyId2, { + async abortCopyFromURL(copyId, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions @@ -53385,36 +56679,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url * * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ - async syncCopyFromURL(copySource2, options = {}) { + async syncCopyFromURL(copySource, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { - var _a, _b, _c, _d, _e, _f, _g; - return assertResponse(await this.blobContext.copyFromURL(copySource2, { + return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? 
void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, sourceContentMD5: options.sourceContentMD5, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), - immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, - immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53425,31 +56722,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * storage only). A premium page blob's tier determines the allowed size, IOPS, * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive * storage type. This operation does not update the blob's ETag. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier * * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ - async setAccessTier(tier2, options = {}) { - return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.blobContext.setTier(toAccessTier(tier2), { + async setAccessTier(tier, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions })); }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { - var _a; - let buffer2; + let buffer; let offset = 0; let count = 0; let options = param4; if (param1 instanceof Buffer) { - buffer2 = param1; + buffer = param1; offset = param2 || 0; count = typeof param3 === "number" ? param3 : 0; } else { @@ -53457,12 +56755,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; count = typeof param2 === "number" ? 
param2 : 0; options = param3 || {}; } - let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + let blockSize = options.blockSize ?? 0; if (blockSize < 0) { throw new RangeError("blockSize option must be >= 0"); } if (blockSize === 0) { - blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } if (offset < 0) { throw new RangeError("offset option must be >= 0"); @@ -53473,26 +56771,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + const response = await this.getProperties({ + ...options, + tracingOptions: updatedOptions.tracingOptions + }); count = response.contentLength - offset; if (count < 0) { throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } } - if (!buffer2) { + if (!buffer) { try { - buffer2 = Buffer.alloc(count); + buffer = Buffer.alloc(count); } catch (error2) { throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error2.message}`); } } - if (buffer2.length < count) { + if (buffer.length < count) { throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); } let transferProgress = 0; - const batch = new Batch(options.concurrency); + const batch = new Batch_js_1.Batch(options.concurrency); for (let off = offset; off < offset + count; off = off + blockSize) { batch.addOperation(async () => { let chunkEnd = offset + count; @@ -53506,8 +56807,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions }); - const stream2 = response.readableStreamBody; - await streamToBuffer(stream2, buffer2, off - offset, chunkEnd - offset); + const stream = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); transferProgress += chunkEnd - off; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); @@ -53515,7 +56816,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }); } await batch.do(); - return buffer2; + return buffer; }); } /** @@ -53535,10 +56836,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * at the specified path. 
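A minimal sketch of how the parallel download helpers touched in this hunk (`downloadToBuffer` and `downloadToFile`) are typically called; the account, container, blob, and file path below are placeholders rather than values taken from the diff:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

// Placeholder names; substitute real values.
const account = "<account>";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const blobClient = blobServiceClient
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// Download the whole blob into a Buffer: 4 MiB blocks, 5 parallel requests.
const buffer = await blobClient.downloadToBuffer(0, undefined, {
  blockSize: 4 * 1024 * 1024,
  concurrency: 5,
  onProgress: (progress) => console.log(`Downloaded ${progress.loadedBytes} bytes`),
});

// Or stream the blob straight to a local file (Node.js only).
await blobClient.downloadToFile("/tmp/blob.bin");
```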
*/ async downloadToFile(filePath, offset = 0, count, options = {}) { - return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); + await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); } response.blobDownloadStream = void 0; return response; @@ -53553,7 +56857,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; - } else if (isIpEndpointStyle(parsedUrl)) { + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; @@ -53581,21 +56885,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ - async startCopyFromURL(copySource2, options = {}) { - return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { - var _a, _b, _c; + async startCopyFromURL(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - return assertResponse(await this.blobContext.startCopyFromURL(copySource2, { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, @@ -53603,12 +56909,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions }, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), sealBlob: options.sealBlob, tracingOptions: updatedOptions.tracingOptions })); @@ -53620,18 +56926,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -53640,24 +56952,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
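For context, a hedged sketch of calling `generateSasUrl`; as the guard above requires, it assumes the client was constructed with a `StorageSharedKeyCredential`, and the account, key, container, and blob names are placeholders:

```ts
import {
  BlobSASPermissions,
  BlobServiceClient,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

// Placeholder credentials; generateSasUrl only works with a shared key credential.
const account = "<account>";
const accountKey = "<account-key>";
const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);

const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  sharedKeyCredential,
)
  .getContainerClient("<container>")
  .getBlobClient("<blob>");

// One-hour, read-only SAS URL for this blob.
const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),
  expiresOn: new Date(Date.now() + 60 * 60 * 1000),
});
console.log(sasUrl);
```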
*/ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).stringToSign; } /** * * Generates a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` @@ -53665,8 +56983,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -53675,14 +56999,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** * Delete the immutablility policy on the blob. 
@@ -53690,8 +57020,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to delete immutability policy on the blob. */ async deleteImmutabilityPolicy(options = {}) { - return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ tracingOptions: updatedOptions.tracingOptions })); }); @@ -53702,8 +57032,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to set immutability policy on the blob. */ async setImmutabilityPolicy(immutabilityPolicy, options = {}) { - return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.setImmutabilityPolicy({ + return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, tracingOptions: updatedOptions.tracingOptions @@ -53716,8 +57046,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional options to set legal hold on the blob. */ async setLegalHold(legalHoldEnabled, options = {}) { - return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { tracingOptions: updatedOptions.tracingOptions })); }); @@ -53727,60 +57057,65 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.blobContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.BlobClient = BlobClient; var AppendBlobClient = class _AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. 
+ */ + appendBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.appendBlobContext = this.storageClientContext.appendBlob; } /** @@ -53791,8 +57126,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. @@ -53803,28 +57138,43 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const appendBlobClient = containerClient.getAppendBlobClient(""); + * ```ts snippet:ClientsCreateAppendBlob + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * const appendBlobClient = containerClient.getAppendBlobClient(blobName); * await appendBlobClient.create(); * ``` */ async create(options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { - var _a, _b, _c; - return assertResponse(await this.appendBlobContext.create(0, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - blobTagsString: toBlobTagsString(options.tags), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -53837,15 +57187,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async createIfNotExists(options = {}) { - const conditions = { ifNoneMatch: ETagAny }; - return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { - var _a, _b; + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const res = (0, utils_common_js_1.assertResponse)(await this.create({ + ...updatedOptions, + conditions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -53858,13 +57219,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async seal(options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.appendBlobContext.seal({ + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -53880,29 +57243,44 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:ClientsAppendBlock + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * * const content = "Hello World!"; * * // Create a new append blob and append data to the blob. 
- * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); * await newAppendBlobClient.create(); * await newAppendBlobClient.appendBlock(content, content.length); * * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ - async appendBlock(body2, contentLength2, options = {}) { + async appendBlock(body, contentLength, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.appendBlobContext.appendBlock(contentLength2, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, @@ -53917,7 +57295,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url * * @param sourceURL - * The url to the blob that will be the source of the copy. 
A source blob in the same storage account can @@ -53931,74 +57309,89 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { - var _a, _b, _c, _d, _e; - return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { abortSignal: options.abortSignal, - sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.AppendBlobClient = AppendBlobClient; var BlockBlobClient = class _BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + */ + _blobContext; + /** + * blockBlobContext provided by protocol layer. 
+ */ + blockBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; + url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.blockBlobContext = this.storageClientContext.blockBlob; this._blobContext = this.storageClientContext.blob; } @@ -54010,8 +57403,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -54020,17 +57413,34 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage (Node.js): * - * ```js + * ```ts snippet:ClientsQuery + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); + * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); + * if (queryBlockBlobResponse.readableStreamBody) { + * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); + * const downloaded = downloadedBuffer.toString(); + * console.log(`Query blob content: ${downloaded}`); + * } * - * async function streamToBuffer(readableStream) { + * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { * return new Promise((resolve, reject) => { - * const chunks = []; + * const chunks: Buffer[] = []; * readableStream.on("data", (data) => { - * chunks.push(typeof data === "string" ? Buffer.from(data) : data); + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); @@ -54044,26 +57454,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async query(query, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - if (!coreUtil.isNode) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + if (!core_util_1.isNodeLike) { throw new Error("This operation currently is only supported in Node.js."); } - return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { - var _a; - const response = assertResponse(await this._blobContext.query({ + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), + outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) }, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); - return new BlobQueryResponse(response, { + return new BlobQueryResponse_js_1.BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError @@ -54092,32 +57504,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async upload(body2, contentLength2, options = {}) { + async upload(body, contentLength, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { - var _a, _b, _c; - return assertResponse(await this.blockBlobContext.upload(contentLength2, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: 
options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54142,16 +57570,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async syncUploadFromURL(sourceURL, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { - var _a, _b, _c, _d, _e, _f; - return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, - sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { + ...options, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** @@ -54165,10 +57608,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. */ - async stageBlock(blockId2, body2, contentLength2, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { - return assertResponse(await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, { + async stageBlock(blockId, body, contentLength, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { @@ -54186,7 +57629,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. * This API is available starting in version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url * * @param blockId - A 64-byte value that is base64-encoded * @param sourceURL - Specifies the URL of the blob. The value @@ -54203,18 +57646,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. 
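A small sketch of the stage-and-commit flow that `stageBlock` and `commitBlockList` implement; the SAS URL, chunk contents, and block-id scheme are illustrative assumptions, not values from this diff:

```ts
import { BlockBlobClient } from "@azure/storage-blob";

// Placeholder SAS URL; any credentialed BlockBlobClient works the same way.
const blockBlobClient = new BlockBlobClient(
  "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>",
);

const chunks = ["hello ", "world"];
const blockIds: string[] = [];

// Stage each chunk under a fixed-length, base64-encoded block id,
// then commit the ids in the order they should appear in the blob.
for (let i = 0; i < chunks.length; i++) {
  const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
  await blockBlobClient.stageBlock(blockId, chunks[i], Buffer.byteLength(chunks[i]));
  blockIds.push(blockId);
}
await blockBlobClient.commitBlockList(blockIds);
```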
*/ - async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { - return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, { + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, - sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), + sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54231,24 +57675,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ - async commitBlockList(blocks2, options = {}) { + async commitBlockList(blocks, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { - var _a, _b, _c; - return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks2 }, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54263,13 +57709,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation. */ - async getBlockList(listType2, options = {}) { - return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { - var _a; - const res = assertResponse(await this.blockBlobContext.getBlockList(listType2, { + async getBlockList(listType, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); if (!res.committedBlocks) { @@ -54298,18 +57746,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async uploadData(data, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { - if (coreUtil.isNode) { - let buffer2; + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (core_util_1.isNodeLike) { + let buffer; if (data instanceof Buffer) { - buffer2 = data; + buffer = data; } else if (data instanceof ArrayBuffer) { - buffer2 = Buffer.from(data); + buffer = Buffer.from(data); } else { data = data; - buffer2 = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); } - return this.uploadSeekableInternal((offset, size) => buffer2.slice(offset, offset + size), buffer2.byteLength, updatedOptions); + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } else { const browserBlob = new Blob([data]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); @@ -54336,7 +57784,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. 
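A brief sketch pairing `uploadData` with `getBlockList`, the two helpers shown above; the SAS URL, payload, and option values are placeholders:

```ts
import { BlockBlobClient } from "@azure/storage-blob";

// Placeholder SAS URL; substitute a real block blob URL or a credentialed client.
const blockBlobClient = new BlockBlobClient(
  "https://<account>.blob.core.windows.net/<container>/<blob>?<sas>",
);

// uploadData chooses single-shot vs. staged upload automatically based on size.
const data = Buffer.from("some payload");
await blockBlobClient.uploadData(data, {
  blockSize: 4 * 1024 * 1024,
  concurrency: 4,
  onProgress: (progress) => console.log(`Uploaded ${progress.loadedBytes} bytes`),
});

// Inspect which blocks have been committed to the blob.
const blockList = await blockBlobClient.getBlockList("committed");
console.log(`Committed blocks: ${blockList.committedBlocks?.length ?? 0}`);
```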
*/ async uploadBrowserData(browserData, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { const browserBlob = new Blob([browserData]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); }); @@ -54357,23 +57805,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { - var _a, _b; - let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; - if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + let blockSize = options.blockSize ?? 0; + if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + const maxSingleShotSize = options.maxSingleShotSize ?? constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } if (blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`${size} is too larger to upload to a block blob.`); } if (size > maxSingleShotSize) { - blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } } @@ -54383,32 +57830,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { if (size <= maxSingleShotSize) { - return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); + return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); } const numBlocks = Math.floor((size - 1) / blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= 
${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); } const blockList = []; - const blockIDPrefix = coreUtil.randomUUID(); + const blockIDPrefix = (0, core_util_2.randomUUID)(); let transferProgress = 0; - const batch = new Batch(options.concurrency); + const batch = new Batch_js_1.Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); const start = blockSize * i; const end = i === numBlocks - 1 ? size : start + blockSize; - const contentLength2 = end - start; + const contentLength = end - start; blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, { + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions }); - transferProgress += contentLength2; + transferProgress += contentLength; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress @@ -54434,15 +57881,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Response data for the Blob Upload operation. */ async uploadFile(filePath, options = {}) { - return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { - const size = (await fsStat(filePath)).size; + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await (0, utils_js_1.fsStat)(filePath)).size; return this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { + return () => (0, utils_js_1.fsCreateReadStream)(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, start: offset }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }, size, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); }); } /** @@ -54461,27 +57911,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. 
*/ - async uploadStream(stream2, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; - const blockIDPrefix = coreUtil.randomUUID(); + const blockIDPrefix = (0, core_util_2.randomUUID)(); let transferProgress = 0; const blockList = []; - const scheduler = new BufferScheduler( - stream2, + const scheduler = new storage_common_1.BufferScheduler( + stream, bufferSize, maxConcurrency, - async (body2, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); + async (body, length) => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); blockList.push(blockID); blockNum++; - await this.stageBlock(blockID, body2, length, { + await this.stageBlock(blockID, body, length, { customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, encryptionScope: options.encryptionScope, @@ -54499,50 +57949,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; Math.ceil(maxConcurrency / 4 * 3) ); await scheduler.do(); - return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { + ...options, + tracingOptions: updatedOptions.tracingOptions + })); }); } }; + exports2.BlockBlobClient = BlockBlobClient; var PageBlobClient = class _PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. 
+ */ + pageBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); + super(url, pipeline); this.pageBlobContext = this.storageClientContext.pageBlob; } /** @@ -54553,8 +58011,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot2) { - return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + withSnapshot(snapshot) { + return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** * Creates a page blob of the specified length. Call uploadPages to upload data @@ -54567,23 +58025,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async create(size, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { - var _a, _b, _c; - return assertResponse(await this.pageBlobContext.create(0, size, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, - immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, legalHold: options.legalHold, - tier: toAccessTier(options.tier), - blobTagsString: toBlobTagsString(options.tags), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); @@ -54598,15 +58058,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async createIfNotExists(size, options = {}) { - return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { - var _a, _b; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - const conditions = { ifNoneMatch: ETagAny }; - const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { + ...options, + conditions, + tracingOptions: updatedOptions.tracingOptions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -54622,19 +58094,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ - async uploadPages(body2, offset, count, options = {}) { + async uploadPages(body, offset, count, options = {}) { options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.uploadPages(count, body2, { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, requestOptions: { onUploadProgress: options.onProgress }, - range: rangeToString({ offset, count }), + range: (0, Range_js_1.rangeToString)({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, @@ -54647,7 +58121,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url * * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob @@ -54658,25 +58132,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { - var _a, _b, _c, _d, _e; - return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, sourceModifiedAccessConditions: { - sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, - sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, - sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, - sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, - copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54692,13 +58169,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.clearPages(0, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, @@ -54717,16 +58196,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { - var _a; - const response = assertResponse(await this.pageBlobContext.getPageRanges({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(response); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** @@ -54741,15 +58222,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to PageBlob Get Page Ranges Segment operation. 
*/ - async listPageRangesSegment(offset = 0, count, marker2, options = {}) { - return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.getPageRanges({ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), - range: rangeToString({ offset, count }), - marker: marker2, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); @@ -54769,17 +58252,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to List Page Ranges operation. */ - listPageRangeItemSegments() { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker2, options = {}) { - let getPageRangeItemSegmentsResponse; - if (!!marker2 || marker2 === void 0) { - do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); - } while (marker2); - } - }); + async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects @@ -54788,27 +58269,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. 
*/ - listPageRangeItems() { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { - var _a, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const getPageRangesSegment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *listPageRangeItems(offset = 0, count, options = {}) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } } /** * Returns an async iterable iterator to list of page ranges for a page blob. @@ -54816,66 +58281,67 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * - * Example using `for await` syntax: + * ```ts snippet:ClientsListPageBlobs + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * - * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * // Example using `for await` syntax * let i = 1; * for await (const pageRange of pageBlobClient.listPageRanges()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * ``` - * - * Example using `iter.next()`: * - * ```js - * let i = 1; - * let iter = pageBlobClient.listPageRanges(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRanges(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); * } - * ``` * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Example using paging with a 
marker + * i = 1; * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } * ``` + * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. @@ -54901,7 +58367,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); } }; } @@ -54917,17 +58386,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { - var _a; - const result2 = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + const result2 = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, prevsnapshot: prevSnapshot, - range: rangeToString({ offset, count }), + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(result2); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result2); }); } /** @@ -54944,20 +58415,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options = {}) { - return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, - leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options?.abortSignal, + leaseAccessConditions: options?.conditions, + modifiedAccessConditions: { + ...options?.conditions, + ifTags: options?.conditions?.tagConditions + }, prevsnapshot: prevSnapshotOrUrl, - range: rangeToString({ + range: (0, Range_js_1.rangeToString)({ offset, count }), - marker: marker2, - maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + marker, + maxPageSize: options?.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); }); @@ -54978,17 +58451,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { - let getPageRangeItemSegmentsResponse; - if (!!marker2 || marker2 === void 0) { - do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); - } while (marker2); - } - }); + async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects @@ -54998,27 +58469,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var _a, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const getPageRangesSegment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; - } - } - }); + async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } } /** * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. @@ -55026,66 +58481,76 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. * - * Example using `for await` syntax: + * ```ts snippet:ClientsListPageBlobsDiff + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using `iter.next()`: + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * - * ```js + * const offset = 0; + * const count = 1024; + * const previousSnapshot = ""; + * // Example using `for await` syntax * let i = 1; - * let iter = pageBlobClient.listPageRangesDiff(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * ``` * - * Example using `byPage()`: + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); + * } * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of 
pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; - * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * // Example using paging with a marker + * i = 1; + * let iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * } * ``` + * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. @@ -55094,7 +58559,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ listPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { + ...options + }); return { /** * The next method, part of the iteration protocol @@ -55112,7 +58579,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); } }; } @@ -55126,19 +58596,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { - var _a; - const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), - prevSnapshotUrl: prevSnapshotUrl2, - range: rangeToString({ offset, count }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + prevSnapshotUrl, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); - return rangeResponseFromModel(response); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** @@ -55151,12 +58623,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ async resize(size, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.resize(size, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); @@ -55164,22 +58638,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } /** * Sets a page blob's sequence number. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. * @param sequenceNumber - Required if sequenceNumberAction is max or update * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. 
*/ - async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); @@ -55190,37 +58666,71 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * copied snapshot are transferred to the destination. * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://learn.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots * * @param copySource - Specifies the name of the source page blob snapshot. For example, * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. */ - async startCopyIncremental(copySource2, options = {}) { - return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { - var _a; - return assertResponse(await this.pageBlobContext.copyIncremental(copySource2, { + async startCopyIncremental(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { abortSignal: options.abortSignal, - modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, tracingOptions: updatedOptions.tracingOptions })); }); } }; + exports2.PageBlobClient = PageBlobClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js +var require_BatchUtils = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getBodyAsText = getBodyAsText; + exports2.utf8ByteLength = utf8ByteLength; + var utils_js_1 = require_utils4(); + var constants_js_1 = require_constants9(); async function getBodyAsText(batchResponse) { - let buffer2 = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer2); - buffer2 = buffer2.slice(0, responseLength); - return buffer2.toString(); + let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); } function utf8ByteLength(str) { return Buffer.byteLength(str); } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js +var require_BatchResponseParser = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BatchResponseParser = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_http_compat_1 = require_commonjs9(); + var constants_js_1 = require_constants9(); + var BatchUtils_js_1 = require_BatchUtils(); + var log_js_1 = require_log5(); var HTTP_HEADER_DELIMITER = ": "; var SPACE_DELIMITER = " "; var NOT_FOUND = -1; var BatchResponseParser = class { + batchResponse; + responseBatchBoundary; + perResponsePrefix; + batchResponseEnding; + subRequests; constructor(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) { throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); @@ -55231,15 +58741,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; this.batchResponseEnding = `--${this.responseBatchBoundary}--`; } - // For example of response, please refer to https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response async parseBatchResponse() { - if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); } - const responseBodyAsText = await getBodyAsText(this.batchResponse); + const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); const subResponses = 
responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); const subResponseCount = subResponses.length; if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { @@ -55251,18 +58761,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; - deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); + const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; let subRespFailed = false; let contentId = NOT_FOUND; for (const responseLine of responseLines) { if (!subRespHeaderStartFound) { - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); } - if (responseLine.startsWith(HTTP_VERSION_1_1)) { + if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { subRespHeaderStartFound = true; const tokens = responseLine.split(SPACE_DELIMITER); deserializedSubResponse.status = parseInt(tokens[1]); @@ -55282,7 +58792,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } const tokens = responseLine.split(HTTP_HEADER_DELIMITER); deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { deserializedSubResponse.errorCode = tokens[1]; subRespFailed = true; } @@ -55297,7 +58807,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; deserializedSubResponse._request = this.subRequests.get(contentId); deserializedSubResponses[contentId] = deserializedSubResponse; } else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + log_js_1.logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); } if (subRespFailed) { subResponsesFailedCount++; @@ -55312,6 +58822,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }; } }; + exports2.BatchResponseParser = BatchResponseParser; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js +var require_Mutex = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Mutex = void 0; var MutexLockStatus; (function(MutexLockStatus2) { MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; @@ -55351,6 +58871,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; resolve(); }); } + static keys = {}; + static listeners = {}; static onUnlockEvent(key, handler) { if (this.listeners[key] === void 0) { this.listeners[key] = [handler]; @@ -55367,11 +58889,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - Mutex.keys = {}; - Mutex.listeners = {}; + exports2.Mutex = Mutex; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js +var require_BlobBatch = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { 
+ "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatch = void 0; + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_2 = require_commonjs4(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var Clients_js_1 = require_Clients(); + var Mutex_js_1 = require_Mutex(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var core_xml_1 = require_commonjs10(); + var constants_js_1 = require_constants9(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var tracing_js_1 = require_tracing(); + var core_client_1 = require_commonjs8(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); var BlobBatch = class { + batchRequest; + batch = "batch"; + batchType; constructor() { - this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } /** @@ -55395,13 +58942,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.batchRequest.getSubRequests(); } async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); + await Mutex_js_1.Mutex.lock(this.batch); try { this.batchRequest.preAddSubRequest(subRequest); await assembleSubRequestFunc(); this.batchRequest.postAddSubRequest(subRequest); } finally { - await Mutex.unlock(this.batch); + await Mutex_js_1.Mutex.unlock(this.batch); } } setBatchType(batchType) { @@ -55413,13 +58960,13 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url2; + let url; let credential; - if (typeof urlOrBlobClient === "string" && (coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrOptions))) { - url2 = urlOrBlobClient; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { + url = urlOrBlobClient; credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; options = credentialOrOptions; } else { @@ -55428,28 +58975,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options) { options = {}; } - return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ - url: url2, + url, credential }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url2; + let url; let credential; - let tier2; - if (typeof urlOrBlobClient === "string" && (coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential || 
credentialOrTier instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrTier))) { - url2 = urlOrBlobClient; + let tier; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { + url = urlOrBlobClient; credential = credentialOrTier; - tier2 = tierOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; + tier = tierOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; - tier2 = credentialOrTier; + tier = credentialOrTier; options = tierOrOptions; } else { throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); @@ -55457,24 +59004,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options) { options = {}; } - return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ - url: url2, + url, credential }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); }); } }; + exports2.BlobBatch = BlobBatch; var InnerBatchRequest = class { + operationCount; + body; + subRequests; + boundary; + subRequestPrefix; + multipartContentType; + batchRequestEnding; constructor() { this.operationCount = 0; this.body = ""; - const tempGuid = coreUtil.randomUUID(); + const tempGuid = (0, core_util_1.randomUUID)(); this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; this.batchRequestEnding = `--${this.boundary}--`; this.subRequests = /* @__PURE__ */ new Map(); @@ -55487,9 +59042,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
*/ createPipeline(credential) { - const corePipeline = coreRestPipeline.createEmptyPipeline(); - corePipeline.addPolicy(coreClient.serializationPolicy({ - stringifyXML: coreXml.stringifyXML, + const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); + corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ + stringifyXML: core_xml_1.stringifyXML, serializerOptions: { xml: { xmlCharKey: "#" @@ -55498,19 +59053,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }), { phase: "Serialize" }); corePipeline.addPolicy(batchHeaderFilterPolicy()); corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); - if (coreAuth.isTokenCredential(credential)) { - corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ credential, - scopes: StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } + scopes: constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential) { - corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); } - const pipeline = new Pipeline([]); + const pipeline = new Pipeline_js_1.Pipeline([]); pipeline._credential = credential; pipeline._corePipeline = corePipeline; return pipeline; @@ -55519,23 +59074,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; this.body += [ this.subRequestPrefix, // sub request constant prefix - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID "", // empty line after sub request's content ID - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` + `${request.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` // sub request start line with method - ].join(HTTP_LINE_ENDING); + ].join(constants_js_1.HTTP_LINE_ENDING); for (const [name, value] of request.headers) { - this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; + this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; } - this.body += HTTP_LINE_ENDING; + this.body += constants_js_1.HTTP_LINE_ENDING; } preAddSubRequest(subRequest) { - if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path3 = getURLPath(subRequest.url); + const path3 = (0, utils_common_js_1.getURLPath)(subRequest.url); if (!path3 || path3 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } @@ -55546,7 +59101,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } // Return the http request body with assembling the ending line to the sub 
request body. getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; } getMultipartContentType() { return this.multipartContentType; @@ -55563,7 +59118,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return { request, status: 200, - headers: coreRestPipeline.createHttpHeaders() + headers: (0, core_rest_pipeline_1.createHttpHeaders)() }; } }; @@ -55574,7 +59129,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; async sendRequest(request, next) { let xMsHeaderName = ""; for (const [name] of request.headers) { - if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { xMsHeaderName = name; } } @@ -55585,18 +59140,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } }; } + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js +var require_BlobBatchClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatchClient = void 0; + var BatchResponseParser_js_1 = require_BatchResponseParser(); + var BatchUtils_js_1 = require_BatchUtils(); + var BlobBatch_js_1 = require_BlobBatch(); + var tracing_js_1 = require_tracing(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); var BlobBatchClient = class { - constructor(url2, credentialOrPipeline, options) { + serviceOrContainerContext; + constructor(url, credentialOrPipeline, options) { let pipeline; - if (isPipelineLike(credentialOrPipeline)) { + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { pipeline = credentialOrPipeline; } else if (!credentialOrPipeline) { - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { - pipeline = newPipeline(credentialOrPipeline, options); + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } - const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path3 = getURLPath(url2); + const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + const path3 = (0, utils_common_js_1.getURLPath)(url); if (path3 && path3 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { @@ -55608,10 +59181,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * A BlobBatch represents an aggregated set of operations on blobs. 
*/ createBatch() { - return new BlobBatch(); + return new BlobBatch_js_1.BlobBatch(); } async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); + const batch = new BlobBatch_js_1.BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); @@ -55622,7 +59195,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.submitBatch(batch); } async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); + const batch = new BlobBatch_js_1.BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); @@ -55640,11 +59213,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob(urlInString0, credential0); - * await batchRequest.deleteBlob(urlInString1, credential1, { - * deleteSnapshots: "include" + * ```ts snippet:BlobBatchClientSubmitBatch + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob("", credential); + * await batchRequest.deleteBlob("", credential, { + * deleteSnapshots: "include", * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); @@ -55652,17 +59239,32 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example using a lease: * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); - * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { - * conditions: { leaseId: leaseId } + * ```ts snippet:BlobBatchClientSubmitBatchWithLease + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * const blobClient = containerClient.getBlobClient(""); + * + * const batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blobClient, "Cool"); + * await batchRequest.setBlobAccessTier(blobClient, "Cool", { + * conditions: { leaseId: "" }, * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @param batchRequest - A set of Delete or 
SetTier operations. * @param options - @@ -55671,10 +59273,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } - return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { + ...updatedOptions + })); + const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); const res = { _response: rawBatchResponse._response, @@ -55691,7 +59295,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; }); } }; - var ContainerClient = class extends StorageClient { + exports2.BlobBatchClient = BlobBatchClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js +var require_ContainerClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var Pipeline_js_1 = require_Pipeline(); + var StorageClient_js_1 = require_StorageClient(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var Clients_js_1 = require_Clients(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var ContainerClient = class extends StorageClient_js_1.StorageClient { + /** + * containerContext provided by protocol layer. + */ + containerContext; + _containerName; /** * The name of the container. 
*/ @@ -55700,48 +59332,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { let pipeline; - let url2; + let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; - } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - pipeline = newPipeline(sharedKeyCredential, options); + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName parameter"); } - super(url2, pipeline); + super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * @see https://learn.microsoft.com/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - Options to Container Create operation. @@ -55749,34 +59381,52 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); + * ```ts snippet:ContainerClientCreate + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); * const createContainerResponse = await containerClient.create(); * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ async create(options = {}) { - return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.create(updatedOptions)); + return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); }); } /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * @see https://learn.microsoft.com/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - */ async createIfNotExists(options = {}) { - return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { - var _a, _b; + return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "ContainerAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } else { throw e; } @@ -55793,7 +59443,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - */ async exists(options = {}) { - return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { try { await this.getProperties({ abortSignal: options.abortSignal, @@ -55815,7 +59465,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobClient object for the given blob name. */ getBlobClient(blobName) { - return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates an {@link AppendBlobClient} @@ -55823,7 +59473,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param blobName - An append blob name */ getAppendBlobClient(blobName) { - return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates a {@link BlockBlobClient} @@ -55833,15 +59483,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js - * const content = "Hello world!"; + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ getBlockBlobClient(blobName) { - return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Creates a {@link PageBlobClient} @@ -55849,12 +59511,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param blobName - A page blob name */ getPageBlobClient(blobName) { - return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); } /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. 
- * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by @@ -55867,14 +59529,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ + abortSignal: options.abortSignal, + ...options.conditions, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ @@ -55882,8 +59548,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.delete({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, @@ -55894,19 +59560,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { - return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { - var _a, _b; + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + return { + succeeded: true, + ...res, + _response: res._response + }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + if (e.details?.errorCode === "ContainerNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; } throw e; } @@ -55918,24 +59591,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * If no option provided, or no metadata defined in the parameter, the container * metadata will be removed. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. */ - async setMetadata(metadata2, options = {}) { + async setMetadata(metadata, options = {}) { if (!options.conditions) { options.conditions = {}; } if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } - return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.setMetadata({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata: metadata2, + metadata, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); @@ -55948,7 +59621,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl * * @param options - Options to Container Get Access Policy operation. */ @@ -55956,8 +59629,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (!options.conditions) { options.conditions = {}; } - return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.getAccessPolicy({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions @@ -56006,29 +59679,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. * During this interval, a shared access signature that is associated with the stored access policy will * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl * * @param access - The level of public access to data in the container. 
* @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. */ - async setAccessPolicy(access2, containerAcl2, options = {}) { + async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; - return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; - for (const identifier of containerAcl2 || []) { + for (const identifier of containerAcl || []) { acl.push({ accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", + expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" + startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" }, id: identifier.id }); } - return assertResponse(await this.containerContext.setAccessPolicy({ + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, - access: access2, + access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, @@ -56043,7 +59716,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns A new BlobLeaseClient object for managing leases on the container. */ getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); } /** * Creates a new block blob, or updates the content of an existing block blob. @@ -56067,10 +59740,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ - async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { - return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response @@ -56082,14 +59755,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob * * @param blobName - * @param options - Options to Blob Delete operation. * @returns Block blob deletion response data. 
*/ async deleteBlob(blobName, options = {}) { - return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); @@ -56107,13 +59780,33 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. */ - async listBlobFlatSegment(marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), { tracingOptions: updatedOptions.tracingOptions }))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); - return blobItem; - }) }) }); + async listBlobFlatSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({ + marker, + ...options, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody) + }, + // _response is made non-enumerable + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }) + } + }; return wrappedResponse; }); } @@ -56128,17 +59821,40 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ - async listBlobHierarchySegment(delimiter2, marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { - var _a; - const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), { tracingOptions: updatedOptions.tracingOptions }))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); - return blobItem; - }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }) }) }); + async listBlobHierarchySegment(delimiter, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, { + marker, + ...options, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: { + ...response._response, + parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody) + }, + // _response is made non-enumerable + segment: { + ...response.segment, + blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = { + ...blobItemInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), + tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) + }; + return blobItem; + }), + blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => { + const blobPrefix = { + ...blobPrefixInternal, + name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name) + }; + return blobPrefix; + }) + } + }; return wrappedResponse; }); } @@ -56154,44 +59870,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listSegments(marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker2, options = {}) { - let listBlobsFlatSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); - marker2 = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); - } while (marker2); - } - }); + async *listSegments(marker, options = {}) { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield await listBlobsFlatSegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ - listItems() { - return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { - var _a, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const listBlobsFlatSegmentResponse = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *listItems(options = {}) { + let marker; + for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { + yield* listBlobsFlatSegmentResponse.segment.blobItems; + } } /** * Returns an async iterable iterator to list all the blobs @@ -56199,64 +59897,63 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* - * Example using `for await` syntax: + * ```ts snippet:ReadmeSampleListBlobs_Multiple + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * // Get the containerClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("");` - * let i = 1; - * for await (const blob of containerClient.listBlobsFlat()) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * Example using `iter.next()`: + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); * - * ```js + * // Example using `for await` syntax * let i = 1; - * let iter = containerClient.listBlobsFlat(); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * const blobs = containerClient.listBlobsFlat(); + * for await (const blob of blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` * - * Example using `byPage()`: + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsFlat(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of response.segment.blobItems) { + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of page.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` * - * Example using paging with a marker: - * - * ```js - * let i = 1; + * // Example using paging with a marker + * i = 1; * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } - * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * * // Prints 10 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` * @@ -56264,41 +59961,44 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns An asyncIterableIterator that supports paging. 
*/ listBlobsFlat(options = {}) { - const include2 = []; + const include = []; if (options.includeCopy) { - include2.push("copy"); + include.push("copy"); } if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSnapshots) { - include2.push("snapshots"); + include.push("snapshots"); } if (options.includeVersions) { - include2.push("versions"); + include.push("versions"); } if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); + include.push("uncommittedblobs"); } if (options.includeTags) { - include2.push("tags"); + include.push("tags"); } if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); + include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); + include.push("immutabilitypolicy"); } if (options.includeLegalHold) { - include2.push("legalhold"); + include.push("legalhold"); } if (options.prefix === "") { options.prefix = void 0; } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; const iter = this.listItems(updatedOptions); return { /** @@ -56317,7 +60017,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...updatedOptions + }); } }; } @@ -56334,17 +60037,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listHierarchySegments(delimiter_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter2, marker2, options = {}) { - let listBlobsHierarchySegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); - marker2 = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); - } while (marker2); - } - }); + async *listHierarchySegments(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield await listBlobsHierarchySegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. @@ -56352,35 +60053,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listItemsByHierarchy(delimiter_1) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter2, options = {}) { - var _a, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const listBlobsHierarchySegmentResponse = _c; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix2 of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; + async *listItemsByHierarchy(delimiter, options = {}) { + let marker; + for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) { + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield { + kind: "prefix", + ...prefix + }; } } - }); + for (const blob of segment.blobItems) { + yield { kind: "blob", ...blob }; + } + } } /** * Returns an async iterable iterator to list all the blobs by hierarchy. @@ -56388,118 +60076,138 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. * - * Example using `for await` syntax: + * ```ts snippet:ReadmeSampleListBlobsByHierarchy + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * ```js - * for await (const item of containerClient.listBlobsByHierarchy("/")) { - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax + * let i = 1; + * const blobs = containerClient.listBlobsByHierarchy("/"); + * for await (const blob of blobs) { + * if (blob.kind === "prefix") { + * console.log(`\tBlobPrefix: ${blob.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } - * ``` - * - * Example using `iter.next()`: * - * ```js - * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); - * let entity = await iter.next(); - * while (!entity.done) { - * let item = entity.value; - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsByHierarchy("/"); + * let { value, done } = await iter.next(); + * while (!done) { + * if (value.kind === "prefix") { + * console.log(`\tBlobPrefix: ${value.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}`); + * console.log(`\tBlobItem: name - ${value.name}`); * } - * entity = await iter.next(); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * 
console.log("Listing blobs by hierarchy by page"); - * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { - * const segment = response.segment; + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { + * const segment = page.segment; * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } - * for (const blob of response.segment.blobItems) { + * for (const blob of page.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } - * ``` - * - * Example using paging with a max page size: - * - * ```js - * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); - * - * let i = 1; - * for await (const response of containerClient - * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) - * .byPage({ maxPageSize: 2 })) { - * console.log(`Page ${i++}`); - * const segment = response.segment; * - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); * } - * + * } + * if (response.segment.blobItems) { * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .listBlobsByHierarchy("/") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } * ``` * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listBlobsByHierarchy(delimiter2, options = {}) { - if (delimiter2 === "") { + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { throw new RangeError("delimiter should contain one or more characters"); } - const include2 = []; + const include = []; if (options.includeCopy) { - include2.push("copy"); + include.push("copy"); } if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSnapshots) { - include2.push("snapshots"); + include.push("snapshots"); } if (options.includeVersions) { - include2.push("versions"); + include.push("versions"); } if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); + include.push("uncommittedblobs"); } if (options.includeTags) { - include2.push("tags"); + include.push("tags"); } if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); + include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); + include.push("immutabilitypolicy"); } if (options.includeLegalHold) { - include2.push("legalhold"); + include.push("legalhold"); } if (options.prefix === "") { options.prefix = void 0; } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); return { /** * The next method, part of the iteration protocol @@ -56517,7 +60225,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.listHierarchySegments(delimiter, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...updatedOptions + }); } }; } @@ -56538,23 +60249,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.containerContext.filterBlobs({ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, - marker: marker2, + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? 
void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; return wrappedResponse; }); } @@ -56574,18 +60289,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker2, options = {}) { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** * Returns an AsyncIterableIterator for blobs. @@ -56596,27 +60309,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. 
*/ - findBlobsByTagsItems(tagFilterSqlExpression_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { - var _a, e_3, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_3_1) { - e_3 = { error: e_3_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_3) throw e_3.error; - } - } - }); + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } /** * Returns an async iterable iterator to find all blobs with specified tag @@ -56626,53 +60323,54 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example using `for await` syntax: * - * ```js + * ```ts snippet:ReadmeSampleFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * // Example using `for await` syntax * let i = 1; * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` - * - * Example using `iter.next()`: * - * ```js - * let i = 1; + * // Example using `iter.next()` syntax + * i = 1; * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Example using paging with a marker + * i = 1; * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken @@ -56680,11 +60378,10 @@ 
${key}:${decodeURIComponent(lowercaseQueries[key])}`; * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; - * - * // Prints blob names + * // Prints 10 blob names * if (response.blobs) { * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` @@ -56696,7 +60393,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); + const listSegmentOptions = { + ...options + }; const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** @@ -56715,7 +60414,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -56724,14 +60426,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.containerContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -56743,7 +60445,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; const parsedUrl = new URL(this.url); if (parsedUrl.hostname.split(".")[1] === "blob") { containerName = parsedUrl.pathname.split("/")[1]; - } else if (isIpEndpointStyle(parsedUrl)) { + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { containerName = parsedUrl.pathname.split("/")[2]; } else { containerName = parsedUrl.pathname.split("/")[1]; @@ -56763,18 +60465,21 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ generateSasUrl(options) { return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, this.credential).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** @@ -56783,23 +60488,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, this.credential).stringToSign; } /** * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the input user delegation key. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` @@ -56807,50 +60515,51 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); - resolve(appendToURLQuery(this.url, sas)); + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. 
* - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { - return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** * Creates a BlobBatchClient object to conduct batch operations. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this container. */ getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } }; + exports2.ContainerClient = ContainerClient; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js +var require_AccountSASPermissions = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASPermissions = void 0; var AccountSASPermissions = class _AccountSASPermissions { - constructor() { - this.read = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.add = false; - this.create = false; - this.update = false; - this.process = false; - this.tag = false; - this.filter = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; - } /** * Parse initializes the AccountSASPermissions fields from a string. * @@ -56954,6 +60663,58 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASPermissions; } + /** + * Permission to read resources and list queues and tables granted. + */ + read = false; + /** + * Permission to write resources granted. + */ + write = false; + /** + * Permission to delete blobs and files granted. + */ + delete = false; + /** + * Permission to delete versions granted. + */ + deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add = false; + /** + * Permission to create blobs and files granted. + */ + create = false; + /** + * Permissions to update messages and table entities granted. + */ + update = false; + /** + * Permission to get and delete messages granted. + */ + process = false; + /** + * Specfies Tag access granted. + */ + tag = false; + /** + * Permission to filter blobs. + */ + filter = false; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set AccountSASSignatureValues Permissions field. 
@@ -56961,7 +60722,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Using this method will guarantee the resource types are in * an order accepted by the service. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * */ toString() { @@ -57008,12 +60769,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return permissions.join(""); } }; + exports2.AccountSASPermissions = AccountSASPermissions; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js +var require_AccountSASResourceTypes = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASResourceTypes = void 0; var AccountSASResourceTypes = class _AccountSASResourceTypes { - constructor() { - this.service = false; - this.container = false; - this.object = false; - } /** * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an * Error if it encounters a character that does not correspond to a valid resource type. @@ -57039,10 +60805,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASResourceTypes; } + /** + * Permission to access service level APIs granted. + */ + service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object = false; /** * Converts the given resource types to a string. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * */ toString() { @@ -57059,13 +60837,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return resourceTypes.join(""); } }; + exports2.AccountSASResourceTypes = AccountSASResourceTypes; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js +var require_AccountSASServices = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASServices = void 0; var AccountSASServices = class _AccountSASServices { - constructor() { - this.blob = false; - this.file = false; - this.queue = false; - this.table = false; - } /** * Creates an {@link AccountSASServices} from the specified services string. This method will throw an * Error if it encounters a character that does not correspond to a valid service. @@ -57094,6 +60876,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return accountSASServices; } + /** + * Permission to access blob resources granted. + */ + blob = false; + /** + * Permission to access file resources granted. + */ + file = false; + /** + * Permission to access queue resources granted. + */ + queue = false; + /** + * Permission to access table resources granted. + */ + table = false; /** * Converts the given services to a string. 
* @@ -57115,44 +60913,62 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return services.join(""); } }; + exports2.AccountSASServices = AccountSASServices; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js +var require_AccountSASSignatureValues = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants9(); + var utils_common_js_1 = require_utils_common(); function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; } function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { - const version2 = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version2 < "2020-08-04") { + const version = accountSASSignatureValues.version ? accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version2 < "2019-10-10") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version2 < "2019-10-10") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version2 < "2019-12-12") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version2 < "2019-12-12") { + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } - if (accountSASSignatureValues.encryptionScope && version2 < "2020-12-06") { + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { throw RangeError("'version' must be >= '2020-12-06' when 
provided 'encryptionScope' in SAS."); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); let stringToSign; - if (version2 >= "2020-12-06") { + if (version >= "2020-12-06") { stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version2, + version, accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", "" // Account SAS requires an additional newline character @@ -57163,22 +60979,50 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; parsedPermissions, parsedServices, parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", - version2, + version, "" // Account SAS requires an additional newline character ].join("\n"); } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { - sasQueryParameters: new SASQueryParameters(version2, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), stringToSign }; } - var BlobServiceClient = class _BlobServiceClient extends StorageClient { + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js +var require_BlobServiceClient = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobServiceClient = void 0; + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var Pipeline_js_1 = require_Pipeline(); + var ContainerClient_js_1 = require_ContainerClient(); + var utils_common_js_1 = require_utils_common(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var utils_common_js_2 = require_utils_common(); + var tracing_js_1 = require_tracing(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var StorageClient_js_1 = require_StorageClient(); + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { + /** + * serviceContext provided by protocol layer. + */ + serviceContext; /** * * Creates an instance of BlobServiceClient from connection string. 
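The next hunk rewrites `BlobServiceClient.fromConnectionString`, the constructor, and `getContainerClient` to use the CommonJS module references. As context, a minimal usage sketch of those entry points (illustrative only and not part of this diff; the connection string and container name are placeholders):

```ts
import { BlobServiceClient } from "@azure/storage-blob";

// Account connection strings are only supported in Node.js (the code below throws
// otherwise); SAS connection strings also work in the browser.
const blobServiceClient = BlobServiceClient.fromConnectionString(
  "<storage account connection string>",
);

// getContainerClient appends the URL-encoded container name to the service URL
// and reuses the service client's pipeline.
const containerClient = blobServiceClient.getContainerClient("<container name>");
console.log(containerClient.url);
```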
@@ -57193,35 +61037,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; */ static fromConnectionString(connectionString, options) { options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreUtil.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) { - options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } - const pipeline = newPipeline(sharedKeyCredential, options); + const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); return new _BlobServiceClient(extractedCreds.url, pipeline); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); + const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } - constructor(url2, credentialOrPipeline, options) { + constructor(url, credentialOrPipeline, options) { let pipeline; - if (isPipelineLike(credentialOrPipeline)) { + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { pipeline = credentialOrPipeline; - } else if (coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); + } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } else { - pipeline = newPipeline(new AnonymousCredential(), options); + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } - super(url2, pipeline); + super(url, pipeline); this.serviceContext = this.storageClientContext.service; } /** @@ -57232,22 +61076,31 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * Example usage: * - * ```js + * ```ts snippet:BlobServiceClientGetContainerClient + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ getContainerClient(containerName) { - return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + return new 
ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); } /** - * Create a Blob container. @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container + * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container * * @param containerName - Name of the container to create. * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. */ async createContainer(containerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { @@ -57264,7 +61117,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @returns Container deletion response. */ async deleteContainer(containerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); return containerClient.delete(updatedOptions); }); @@ -57278,47 +61131,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ - async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); const containerContext = containerClient["storageClientContext"].container; - const containerUndeleteResponse = assertResponse(await containerContext.restore({ - deletedContainerName: deletedContainerName2, - deletedContainerVersion: deletedContainerVersion2, + const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, tracingOptions: updatedOptions.tracingOptions })); return { containerClient, containerUndeleteResponse }; }); } - /** - * Rename an existing Blob Container. - * - * @param sourceContainerName - The name of the source container. - * @param destinationContainerName - The new name of the container. - * @param options - Options to configure Container Rename operation. - */ - /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ - // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. 
- async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { - return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { - var _a; - const containerClient = this.getContainerClient(destinationContainerName); - const containerContext = containerClient["storageClientContext"].container; - const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); - return { containerClient, containerRenameResponse }; - }); - } /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties * * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. */ async getProperties(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getProperties({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57327,15 +61162,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties * * @param properties - * @param options - Options to the Service Set Properties operation. * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { - return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.setProperties(properties, { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57345,14 +61180,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant * replication is enabled for the storage account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats * * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. 
*/ async getStatistics(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getStatistics({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57363,14 +61198,14 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.getAccountInfo({ + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); @@ -57378,7 +61213,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } /** * Returns a list of the containers under the specified account. - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The @@ -57390,9 +61225,15 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. */ - async listContainersSegment(marker2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { - return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + async listContainersSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ + abortSignal: options.abortSignal, + marker, + ...options, + include: typeof options.include === "string" ? [options.include] : options.include, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** @@ -57413,23 +61254,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = assertResponse(await this.serviceContext.filterBlobs({ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, - marker: marker2, + marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; return wrappedResponse; }); } @@ -57449,18 +61294,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker2, options = {}) { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** * Returns an AsyncIterableIterator for blobs. @@ -57471,27 +61314,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. 
*/ - findBlobsByTagsItems(tagFilterSqlExpression_1) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { - var _a, e_1, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_1) throw e_1.error; - } - } - }); + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } /** * Returns an async iterable iterator to find all blobs with specified tag @@ -57499,57 +61326,53 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the blobs in pages. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties * - * Example using `for await` syntax: + * ```ts snippet:BlobServiceClientFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * ```js + * // Use for await to iterate the blobs * let i = 1; * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${container.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } - * ``` - * - * Example using `iter.next()`: * - * ```js - * let i = 1; + * // Use iter.next() to iterate the blobs + * i = 1; * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } + * // Use byPage() to iterate the blobs + * i = 1; + * for await (const page of blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } - * ``` - * - * Example using paging with a marker: * - * ```js - * let i = 1; + * // Use paging with a marker + * i = 1; * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; - * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * 
console.log(`Blob ${i++}: ${blob.name}`); * } * } - * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken @@ -57561,7 +61384,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` @@ -57573,7 +61396,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); + const listSegmentOptions = { + ...options + }; const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** @@ -57592,7 +61417,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -57608,45 +61436,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * items. The marker value is opaque to the client. * @param options - Options to list containers operation. */ - listSegments(marker_1) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker2, options = {}) { - let listContainersSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker2 = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker2); - } - }); + async *listSegments(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === void 0) { + do { + listContainersSegmentResponse = await this.listContainersSegment(marker, options); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield await listContainersSegmentResponse; + } while (marker); + } } /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. 
*/ - listItems() { - return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { - var _a, e_2, _b, _c; - let marker2; - try { - for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker2, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const segment = _c; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); - } finally { - if (e_2) throw e_2.error; - } - } - }); + async *listItems(options = {}) { + let marker; + for await (const segment of this.listSegments(marker, options)) { + yield* segment.containerItems; + } } /** * Returns an async iterable iterator to list all the containers @@ -57654,45 +61464,41 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * .byPage() returns an async iterable iterator to list the containers in pages. * - * Example using `for await` syntax: + * ```ts snippet:BlobServiceClientListContainers + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * ```js + * // Use for await to iterate the containers * let i = 1; * for await (const container of blobServiceClient.listContainers()) { * console.log(`Container ${i++}: ${container.name}`); * } - * ``` * - * Example using `iter.next()`: - * - * ```js - * let i = 1; + * // Use iter.next() to iterate the containers + * i = 1; * const iter = blobServiceClient.listContainers(); - * let containerItem = await iter.next(); - * while (!containerItem.done) { - * console.log(`Container ${i++}: ${containerItem.value.name}`); - * containerItem = await iter.next(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Container ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); * } - * ``` - * - * Example using `byPage()`: * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } + * // Use byPage() to iterate the containers + * i = 1; + * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * for (const container of page.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } - * ``` * - * Example using paging with a marker: - * - * ```js - * let i = 1; + * // Use paging with a marker + * i = 1; * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * @@ -57714,7 +61520,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * // Prints 10 container names * if (response.containerItems) { * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); + * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` @@ -57726,17 +61532,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (options.prefix === "") { options.prefix = void 0; } - const include2 = []; + const include = 
[]; if (options.includeDeleted) { - include2.push("deleted"); + include.push("deleted"); } if (options.includeMetadata) { - include2.push("metadata"); + include.push("metadata"); } if (options.includeSystem) { - include2.push("system"); + include.push("system"); } - const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const listSegmentOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; const iter = this.listItems(listSegmentOptions); return { /** @@ -57755,7 +61564,10 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } }; } @@ -57765,16 +61577,16 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key * * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ - async getUserDelegationKey(startsOn, expiresOn2, options = {}) { - return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { - const response = assertResponse(await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn2, false) + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ + startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), + expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) }, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions @@ -57788,19 +61600,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; signedVersion: response.signedVersion, value: response.value }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + const res = { + _response: response._response, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + version: response.version, + date: response.date, + errorCode: response.errorCode, + ...userDelegationKey + }; return res; }); } /** * Creates a BlobBatchClient object to conduct batch operations. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this service. 
*/ getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } /** * Only available for BlobServiceClient constructed with a shared key credential. @@ -57808,7 +61628,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. @@ -57816,21 +61636,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (expiresOn2 === void 0) { + if (expiresOn === void 0) { const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + expiresOn = new Date(now.getTime() + 3600 * 1e3); } - const sas = generateAccountSASQueryParameters(Object.assign({ + const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ permissions, - expiresOn: expiresOn2, + expiresOn, resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).toString(); - return appendToURLQuery(this.url, sas); + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).toString(); + return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); } /** * Only available for BlobServiceClient constructed with a shared key credential. @@ -57838,7 +61659,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. @@ -57846,66 +61667,124 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - generateSasStringToSign(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - if (expiresOn2 === void 0) { + if (expiresOn === void 0) { const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + expiresOn = new Date(now.getTime() + 3600 * 1e3); } - return generateAccountSASQueryParametersInternal(Object.assign({ + return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ permissions, - expiresOn: expiresOn2, + expiresOn, resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).stringToSign; + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).stringToSign; } }; - exports2.KnownEncryptionAlgorithmType = void 0; - (function(KnownEncryptionAlgorithmType) { - KnownEncryptionAlgorithmType["AES256"] = "AES256"; - })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); - Object.defineProperty(exports2, "RestError", { - enumerable: true, - get: function() { - return coreRestPipeline.RestError; - } - }); - exports2.AccountSASPermissions = AccountSASPermissions; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; - exports2.AccountSASServices = AccountSASServices; - exports2.AnonymousCredential = AnonymousCredential; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; - exports2.AppendBlobClient = AppendBlobClient; - exports2.BaseRequestPolicy = BaseRequestPolicy; - exports2.BlobBatch = BlobBatch; - exports2.BlobBatchClient = BlobBatchClient; - exports2.BlobClient = BlobClient; - exports2.BlobLeaseClient = BlobLeaseClient; - exports2.BlobSASPermissions = BlobSASPermissions; exports2.BlobServiceClient = BlobServiceClient; - exports2.BlockBlobClient = BlockBlobClient; - exports2.ContainerClient = ContainerClient; - exports2.ContainerSASPermissions = ContainerSASPermissions; - exports2.Credential = Credential; - exports2.CredentialPolicy = CredentialPolicy; - exports2.PageBlobClient = PageBlobClient; - exports2.Pipeline = Pipeline; - exports2.SASQueryParameters = SASQueryParameters; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; - exports2.StorageOAuthScopes = StorageOAuthScopes; - exports2.StorageRetryPolicy = StorageRetryPolicy; - exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; - exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; - exports2.isPipelineLike = isPipelineLike; - exports2.logger = logger; - exports2.newPipeline = newPipeline; + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js +var require_BatchResponse = __commonJS({ + 
"../../node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js +var require_generatedModels = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) { + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + } +}); + +// ../../node_modules/@azure/storage-blob/dist/commonjs/index.js +var require_commonjs13 = __commonJS({ + "../../node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var core_rest_pipeline_1 = require_commonjs6(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return core_rest_pipeline_1.RestError; + } }); + tslib_1.__exportStar(require_BlobServiceClient(), exports2); + tslib_1.__exportStar(require_Clients(), exports2); + tslib_1.__exportStar(require_ContainerClient(), exports2); + tslib_1.__exportStar(require_BlobLeaseClient(), exports2); + tslib_1.__exportStar(require_AccountSASPermissions(), exports2); + tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); + tslib_1.__exportStar(require_AccountSASServices(), exports2); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { + return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; + } }); + tslib_1.__exportStar(require_BlobBatch(), exports2); + tslib_1.__exportStar(require_BlobBatchClient(), exports2); + tslib_1.__exportStar(require_BatchResponse(), exports2); + tslib_1.__exportStar(require_BlobSASPermissions(), exports2); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { + return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); + tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); + tslib_1.__exportStar(require_AnonymousCredential(), exports2); + tslib_1.__exportStar(require_Credential(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); + var models_js_1 = require_models2(); + Object.defineProperty(exports2, "BlockBlobTier", { enumerable: true, get: function() { + return models_js_1.BlockBlobTier; + } }); + Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { + return models_js_1.PremiumPageBlobTier; + } }); + 
Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { + return models_js_1.StorageBlobAudience; + } }); + Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { + return models_js_1.getBlobServiceAccountAudience; + } }); + var Pipeline_js_1 = require_Pipeline(); + Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { + return Pipeline_js_1.Pipeline; + } }); + Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { + return Pipeline_js_1.isPipelineLike; + } }); + Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { + return Pipeline_js_1.newPipeline; + } }); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return Pipeline_js_1.StorageOAuthScopes; + } }); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); + tslib_1.__exportStar(require_CredentialPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); + tslib_1.__exportStar(require_SASQueryParameters(), exports2); + tslib_1.__exportStar(require_generatedModels(), exports2); + var log_js_1 = require_log5(); + Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { + return log_js_1.logger; + } }); } }); @@ -58047,7 +61926,7 @@ var require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; var core5 = __importStar2(require_core()); - var storage_blob_1 = require_dist4(); + var storage_blob_1 = require_commonjs13(); var errors_1 = require_errors2(); var UploadProgress = class { constructor(contentLength) { @@ -58330,7 +62209,7 @@ var require_requestUtils = __commonJS({ }); // ../../node_modules/@actions/cache/node_modules/@azure/abort-controller/dist/index.js -var require_dist5 = __commonJS({ +var require_dist4 = __commonJS({ "../../node_modules/@actions/cache/node_modules/@azure/abort-controller/dist/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -58535,7 +62414,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; var core5 = __importStar2(require_core()); var http_client_1 = require_lib(); - var storage_blob_1 = require_dist4(); + var storage_blob_1 = require_commonjs13(); var buffer = __importStar2(require("buffer")); var fs = __importStar2(require("fs")); var stream = __importStar2(require("stream")); @@ -58543,7 +62422,7 @@ var require_downloadUtils = __commonJS({ var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants6(); var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist5(); + var abort_controller_1 = require_dist4(); function pipeResponseToStream(response, output) { return __awaiter2(this, void 0, void 0, function* () { const pipeline = util.promisify(stream.pipeline); @@ -62201,7 +66080,7 @@ var require_enum_object = __commonJS({ 
}); // ../../node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs12 = __commonJS({ +var require_commonjs14 = __commonJS({ "../../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -62392,7 +66271,7 @@ var require_reflection_info2 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function normalizeMethodInfo(method, service) { var _a, _b, _c; let m = method; @@ -62501,7 +66380,7 @@ var require_rpc_options = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function mergeRpcOptions(defaults, options) { if (!options) return defaults; @@ -62638,7 +66517,7 @@ var require_rpc_output_stream = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RpcOutputStreamController = void 0; var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); var RpcOutputStreamController = class { constructor() { this._lis = { @@ -63093,7 +66972,7 @@ var require_test_transport = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.TestTransport = void 0; var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); var rpc_output_stream_1 = require_rpc_output_stream(); var rpc_options_1 = require_rpc_options(); var unary_call_1 = require_unary_call(); @@ -63337,7 +67216,7 @@ var require_rpc_interceptor = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs12(); + var runtime_1 = require_commonjs14(); function stackIntercept(kind, transport, method, options, input) { var _a, _b, _c, _d; if (kind == "unary") { @@ -63461,7 +67340,7 @@ var require_server_call_context = __commonJS({ }); // ../../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs13 = __commonJS({ +var require_commonjs15 = __commonJS({ "../../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -63547,11 +67426,11 @@ var require_cachescope = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheScope = void 0; - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var CacheScope$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.entities.v1.CacheScope", [ @@ -63623,11 +67502,11 @@ var require_cachemetadata = __commonJS({ "use 
strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheMetadata = void 0; - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var cachescope_1 = require_cachescope(); var CacheMetadata$Type = class extends runtime_5.MessageType { constructor() { @@ -63689,17 +67568,17 @@ var require_cachemetadata = __commonJS({ }); // ../../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js -var require_cache2 = __commonJS({ +var require_cache3 = __commonJS({ "../../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; - var runtime_rpc_1 = require_commonjs13(); - var runtime_1 = require_commonjs12(); - var runtime_2 = require_commonjs12(); - var runtime_3 = require_commonjs12(); - var runtime_4 = require_commonjs12(); - var runtime_5 = require_commonjs12(); + var runtime_rpc_1 = require_commonjs15(); + var runtime_1 = require_commonjs14(); + var runtime_2 = require_commonjs14(); + var runtime_3 = require_commonjs14(); + var runtime_4 = require_commonjs14(); + var runtime_5 = require_commonjs14(); var cachemetadata_1 = require_cachemetadata(); var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { constructor() { @@ -64148,7 +68027,7 @@ var require_cache_twirp_client = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; - var cache_1 = require_cache2(); + var cache_1 = require_cache3(); var CacheServiceClientJSON = class { constructor(rpc) { this.rpc = rpc; @@ -64674,7 +68553,7 @@ var require_tar = __commonJS({ }); // ../../node_modules/@actions/cache/lib/cache.js -var require_cache3 = __commonJS({ +var require_cache4 = __commonJS({ "../../node_modules/@actions/cache/lib/cache.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { @@ -65120,7 +68999,7 @@ var require_context = __commonJS({ }); // ../../node_modules/@actions/github/lib/internal/utils.js -var require_utils4 = __commonJS({ +var require_utils5 = __commonJS({ "../../node_modules/@actions/github/lib/internal/utils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { @@ -69026,7 +72905,7 @@ var require_dist_node10 = __commonJS({ }); // ../../node_modules/@actions/github/lib/utils.js -var require_utils5 = __commonJS({ +var require_utils6 = __commonJS({ "../../node_modules/@actions/github/lib/utils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { @@ -69059,7 +72938,7 @@ var require_utils5 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOctokitOptions = exports2.GitHub = exports2.defaults = exports2.context = void 0; var Context = __importStar2(require_context()); - var Utils = __importStar2(require_utils4()); + var Utils = __importStar2(require_utils5()); var core_1 = require_dist_node8(); var plugin_rest_endpoint_methods_1 = require_dist_node9(); var plugin_paginate_rest_1 = require_dist_node10(); @@ -69119,7 +72998,7 @@ var require_github = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOctokit = exports2.context = void 0; var Context = __importStar2(require_context()); - var utils_1 = require_utils5(); + var utils_1 = require_utils6(); exports2.context = new Context.Context(); function getOctokit3(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); @@ -88997,7 +92876,7 @@ ${ctx.indent}`; }); // ../../node_modules/yaml/dist/log.js -var require_log5 = __commonJS({ +var require_log7 = __commonJS({ "../../node_modules/yaml/dist/log.js"(exports2) { "use strict"; var node_process = require("process"); @@ -89079,7 +92958,7 @@ var require_merge = __commonJS({ var require_addPairToJSMap = __commonJS({ "../../node_modules/yaml/dist/nodes/addPairToJSMap.js"(exports2) { "use strict"; - var log = require_log5(); + var log = require_log7(); var merge = require_merge(); var stringify = require_stringify(); var identity = require_identity(); @@ -94512,7 +98391,7 @@ var require_public_api = __commonJS({ var composer = require_composer(); var Document = require_Document(); var errors = require_errors3(); - var log = require_log5(); + var log = require_log7(); var identity = require_identity(); var lineCounter = require_line_counter(); var parser = require_parser(); @@ -94603,7 +98482,7 @@ var require_public_api = __commonJS({ }); // ../../node_modules/yaml/dist/index.js -var require_dist6 = __commonJS({ +var require_dist5 = __commonJS({ "../../node_modules/yaml/dist/index.js"(exports2) { "use strict"; var composer = require_composer(); @@ -95974,7 +99853,7 @@ var require_light = __commonJS({ }); // ../../node_modules/semver/internal/constants.js -var require_constants10 = __commonJS({ +var require_constants12 = __commonJS({ "../../node_modules/semver/internal/constants.js"(exports2, module2) { "use strict"; var SEMVER_SPEC_VERSION = "2.0.0"; @@ -96023,7 +99902,7 @@ var require_re = __commonJS({ MAX_SAFE_COMPONENT_LENGTH, MAX_SAFE_BUILD_LENGTH, MAX_LENGTH - } = require_constants10(); + } = require_constants12(); var debug2 = require_debug2(); exports2 = module2.exports = {}; var re = exports2.re = []; @@ -96149,7 +100028,7 @@ var require_semver2 = __commonJS({ "../../node_modules/semver/classes/semver.js"(exports2, module2) { "use strict"; var debug2 = require_debug2(); - var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants10(); + var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants12(); var { safeRe: re, t } = require_re(); var parseOptions = require_parse_options(); var { compareIdentifiers } = require_identifiers(); @@ -96979,7 +100858,7 @@ var require_range = __commonJS({ tildeTrimReplace, caretTrimReplace } = require_re(); - var { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require_constants10(); + var { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require_constants12(); var isNullSet = (c) => c.value === "<0.0.0-0"; var isAny = (c) => c.value === ""; var isSatisfiable = 
(comparators, options) => { @@ -97791,7 +101670,7 @@ var require_semver3 = __commonJS({ "../../node_modules/semver/index.js"(exports2, module2) { "use strict"; var internalRe = require_re(); - var constants = require_constants10(); + var constants = require_constants12(); var SemVer = require_semver2(); var identifiers = require_identifiers(); var parse3 = require_parse2(); @@ -97887,7 +101766,7 @@ var core4 = __toESM(require_core(), 1); // src/cache.ts var crypto = __toESM(require("node:crypto"), 1); -var cache = __toESM(require_cache3(), 1); +var cache = __toESM(require_cache4(), 1); var core2 = __toESM(require_core(), 1); var import_exec = __toESM(require_exec(), 1); var github2 = __toESM(require_github(), 1); @@ -97899,7 +101778,7 @@ var os = __toESM(require("node:os"), 1); var path = __toESM(require("node:path"), 1); var process2 = __toESM(require("node:process"), 1); var core = __toESM(require_core(), 1); -var yaml = __toESM(require_dist6(), 1); +var yaml = __toESM(require_dist5(), 1); var ARCHITECTURE = (() => { switch (process2.arch) { case "arm": { @@ -97949,7 +101828,7 @@ var PLATFORM = (() => { } })(); var GITHUB_WORKSPACE = process2.env.GITHUB_WORKSPACE ?? process2.cwd(); -var CYGWIN_ROOT = path.join("D:", "cygwin"); +var CYGWIN_ROOT = path.join("C:", "cygwin"); var CYGWIN_ROOT_BIN = path.join(CYGWIN_ROOT, "bin"); var CYGWIN_LOCAL_PACKAGE_DIR = path.join(CYGWIN_ROOT, "packages"); var CYGWIN_BASH_ENV = path.join(CYGWIN_ROOT, "bash_env"); @@ -97959,13 +101838,13 @@ var DUNE_CACHE_ROOT = (() => { return path.join(xdgCacheHome, "dune"); } if (PLATFORM === "windows") { - return path.join("D:", "dune"); + return path.join("C:", "dune"); } return path.join(os.homedir(), ".cache", "dune"); })(); var OPAM_ROOT = (() => { if (PLATFORM === "windows") { - return path.join("D:", ".opam"); + return path.join("C:", ".opam"); } return path.join(os.homedir(), ".opam"); })(); diff --git a/package.json b/package.json index 423fd1bd..a1c14479 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "setup-ocaml", - "version": "3.4.0", + "version": "3.4.1", "private": true, "type": "module", "workspaces": [ diff --git a/packages/analysis/package.json b/packages/analysis/package.json index 0e20db1d..54346c99 100644 --- a/packages/analysis/package.json +++ b/packages/analysis/package.json @@ -19,7 +19,7 @@ "devDependencies": { "@tsconfig/node24": "24.0.1", "@tsconfig/strictest": "2.0.5", - "@types/node": "24.0.15", + "@types/node": "24.1.0", "esbuild": "0.25.8", "typescript": "5.8.3" } diff --git a/packages/lint-doc/package.json b/packages/lint-doc/package.json index 6d64519e..4df933ec 100644 --- a/packages/lint-doc/package.json +++ b/packages/lint-doc/package.json @@ -14,7 +14,7 @@ "devDependencies": { "@tsconfig/node24": "24.0.1", "@tsconfig/strictest": "2.0.5", - "@types/node": "24.0.15", + "@types/node": "24.1.0", "esbuild": "0.25.8", "typescript": "5.8.3" } diff --git a/packages/lint-fmt/package.json b/packages/lint-fmt/package.json index b11cb522..64001f0b 100644 --- a/packages/lint-fmt/package.json +++ b/packages/lint-fmt/package.json @@ -14,7 +14,7 @@ "devDependencies": { "@tsconfig/node24": "24.0.1", "@tsconfig/strictest": "2.0.5", - "@types/node": "24.0.15", + "@types/node": "24.1.0", "esbuild": "0.25.8", "typescript": "5.8.3" } diff --git a/packages/lint-opam/package.json b/packages/lint-opam/package.json index 6497ff4f..2ea80fff 100644 --- a/packages/lint-opam/package.json +++ b/packages/lint-opam/package.json @@ -14,7 +14,7 @@ "devDependencies": { "@tsconfig/node24": 
"24.0.1", "@tsconfig/strictest": "2.0.5", - "@types/node": "24.0.15", + "@types/node": "24.1.0", "esbuild": "0.25.8", "typescript": "5.8.3" } diff --git a/packages/setup-ocaml/package.json b/packages/setup-ocaml/package.json index fa4979d4..9a1bb43e 100644 --- a/packages/setup-ocaml/package.json +++ b/packages/setup-ocaml/package.json @@ -18,7 +18,7 @@ "@actions/io": "1.1.3", "@actions/tool-cache": "2.0.2", "@octokit/plugin-retry": "8.0.1", - "cheerio": "1.1.0", + "cheerio": "1.1.2", "exponential-backoff": "3.1.2", "semver": "7.7.2", "systeminformation": "5.27.7", @@ -27,7 +27,7 @@ "devDependencies": { "@tsconfig/node24": "24.0.1", "@tsconfig/strictest": "2.0.5", - "@types/node": "24.0.15", + "@types/node": "24.1.0", "@types/semver": "7.7.0", "esbuild": "0.25.8", "typescript": "5.8.3" diff --git a/packages/setup-ocaml/src/constants.ts b/packages/setup-ocaml/src/constants.ts index 26296f2e..7c89683f 100644 --- a/packages/setup-ocaml/src/constants.ts +++ b/packages/setup-ocaml/src/constants.ts @@ -58,8 +58,7 @@ export const CYGWIN_MIRROR = "https://mirrors.kernel.org/sourceware/cygwin/"; export const GITHUB_WORKSPACE = process.env.GITHUB_WORKSPACE ?? process.cwd(); -// [HACK] https://github.com/ocaml/setup-ocaml/pull/55 -export const CYGWIN_ROOT = path.join("D:", "cygwin"); +export const CYGWIN_ROOT = path.join("C:", "cygwin"); export const CYGWIN_ROOT_BIN = path.join(CYGWIN_ROOT, "bin"); @@ -73,16 +72,14 @@ export const DUNE_CACHE_ROOT = (() => { return path.join(xdgCacheHome, "dune"); } if (PLATFORM === "windows") { - // [HACK] https://github.com/ocaml/setup-ocaml/pull/55 - return path.join("D:", "dune"); + return path.join("C:", "dune"); } return path.join(os.homedir(), ".cache", "dune"); })(); export const OPAM_ROOT = (() => { if (PLATFORM === "windows") { - // [HACK] https://github.com/ocaml/setup-ocaml/pull/55 - return path.join("D:", ".opam"); + return path.join("C:", ".opam"); } return path.join(os.homedir(), ".opam"); })(); diff --git a/yarn.lock b/yarn.lock index 3a4a9b27..f80f2085 100644 --- a/yarn.lock +++ b/yarn.lock @@ -125,7 +125,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-auth@npm:^1.1.4, @azure/core-auth@npm:^1.4.0, @azure/core-auth@npm:^1.8.0": +"@azure/core-auth@npm:^1.1.4, @azure/core-auth@npm:^1.4.0, @azure/core-auth@npm:^1.8.0, @azure/core-auth@npm:^1.9.0": version: 1.10.0 resolution: "@azure/core-auth@npm:1.10.0" dependencies: @@ -136,7 +136,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-client@npm:^1.3.0, @azure/core-client@npm:^1.6.2": +"@azure/core-client@npm:^1.3.0, @azure/core-client@npm:^1.9.3": version: 1.10.0 resolution: "@azure/core-client@npm:1.10.0" dependencies: @@ -151,7 +151,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-http-compat@npm:^2.0.0": +"@azure/core-http-compat@npm:^2.2.0": version: 2.3.0 resolution: "@azure/core-http-compat@npm:2.3.0" dependencies: @@ -174,7 +174,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-paging@npm:^1.1.1": +"@azure/core-paging@npm:^1.6.2": version: 1.6.2 resolution: "@azure/core-paging@npm:1.6.2" dependencies: @@ -183,7 +183,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-rest-pipeline@npm:^1.10.1, @azure/core-rest-pipeline@npm:^1.20.0": +"@azure/core-rest-pipeline@npm:^1.19.1, @azure/core-rest-pipeline@npm:^1.20.0": version: 1.22.0 resolution: "@azure/core-rest-pipeline@npm:1.22.0" dependencies: @@ -198,7 +198,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-tracing@npm:^1.0.0, @azure/core-tracing@npm:^1.0.1, 
@azure/core-tracing@npm:^1.1.2": +"@azure/core-tracing@npm:^1.0.0, @azure/core-tracing@npm:^1.0.1, @azure/core-tracing@npm:^1.2.0": version: 1.3.0 resolution: "@azure/core-tracing@npm:1.3.0" dependencies: @@ -218,7 +218,7 @@ __metadata: languageName: node linkType: hard -"@azure/core-xml@npm:^1.4.3": +"@azure/core-xml@npm:^1.4.5": version: 1.5.0 resolution: "@azure/core-xml@npm:1.5.0" dependencies: @@ -228,7 +228,7 @@ __metadata: languageName: node linkType: hard -"@azure/logger@npm:^1.0.0": +"@azure/logger@npm:^1.0.0, @azure/logger@npm:^1.1.4": version: 1.3.0 resolution: "@azure/logger@npm:1.3.0" dependencies: @@ -255,23 +255,41 @@ __metadata: linkType: hard "@azure/storage-blob@npm:^12.13.0": - version: 12.27.0 - resolution: "@azure/storage-blob@npm:12.27.0" + version: 12.28.0 + resolution: "@azure/storage-blob@npm:12.28.0" dependencies: "@azure/abort-controller": "npm:^2.1.2" - "@azure/core-auth": "npm:^1.4.0" - "@azure/core-client": "npm:^1.6.2" - "@azure/core-http-compat": "npm:^2.0.0" + "@azure/core-auth": "npm:^1.9.0" + "@azure/core-client": "npm:^1.9.3" + "@azure/core-http-compat": "npm:^2.2.0" "@azure/core-lro": "npm:^2.2.0" - "@azure/core-paging": "npm:^1.1.1" - "@azure/core-rest-pipeline": "npm:^1.10.1" - "@azure/core-tracing": "npm:^1.1.2" - "@azure/core-util": "npm:^1.6.1" - "@azure/core-xml": "npm:^1.4.3" - "@azure/logger": "npm:^1.0.0" + "@azure/core-paging": "npm:^1.6.2" + "@azure/core-rest-pipeline": "npm:^1.19.1" + "@azure/core-tracing": "npm:^1.2.0" + "@azure/core-util": "npm:^1.11.0" + "@azure/core-xml": "npm:^1.4.5" + "@azure/logger": "npm:^1.1.4" + "@azure/storage-common": "npm:^12.0.0-beta.2" events: "npm:^3.0.0" - tslib: "npm:^2.2.0" - checksum: 10c0/287447d0de274183a88e200ad63347a8cfc606cad7f023e073d3c0cf00de70a40097404b180e7f589f5010ed035f2c8f7a6a770280fc8192448e614ca7311f7e + tslib: "npm:^2.8.1" + checksum: 10c0/a97f43193da9c5fbb58492156ad6dd3f0c9f1bf2fdf72cbc5a1fdfe16a586b0a4bb7bfaa7e24de63d689927c2a606d4ca65332594e0f2278c55d48f7c90d1a54 + languageName: node + linkType: hard + +"@azure/storage-common@npm:^12.0.0-beta.2": + version: 12.0.0 + resolution: "@azure/storage-common@npm:12.0.0" + dependencies: + "@azure/abort-controller": "npm:^2.1.2" + "@azure/core-auth": "npm:^1.9.0" + "@azure/core-http-compat": "npm:^2.2.0" + "@azure/core-rest-pipeline": "npm:^1.19.1" + "@azure/core-tracing": "npm:^1.2.0" + "@azure/core-util": "npm:^1.11.0" + "@azure/logger": "npm:^1.1.4" + events: "npm:^3.3.0" + tslib: "npm:^2.8.1" + checksum: 10c0/79cd046142b71ceb85e2961855a8229c5476ac05136b65a5c79717384b3c53253b0ced5a71bc7eb87a7320a8cffb3b2b440105dc56baf1fe4fceecd174f7f86e languageName: node linkType: hard @@ -366,21 +384,21 @@ __metadata: languageName: node linkType: hard -"@bufbuild/protobuf@npm:2.6.1, @bufbuild/protobuf@npm:^2.4.0": - version: 2.6.1 - resolution: "@bufbuild/protobuf@npm:2.6.1" - checksum: 10c0/7de38230244814bb48915f89280c3beb366abc7b8cdfefeec6f7ef9f9fbe452695845d1baad8c3fd5ba3b0391b45367bdf9c648e32c08a09ecdb0a79afeb0033 +"@bufbuild/protobuf@npm:2.6.2, @bufbuild/protobuf@npm:^2.4.0": + version: 2.6.2 + resolution: "@bufbuild/protobuf@npm:2.6.2" + checksum: 10c0/e191fa17ae253eeb65671a2e0b8efb205772937a21906724bb4d362681a2105e81a5c1b0bf9170cbb500d1db62e8becd16bc125a0516880e3abbc16ae8336e21 languageName: node linkType: hard "@bufbuild/protoplugin@npm:^2.4.0": - version: 2.6.1 - resolution: "@bufbuild/protoplugin@npm:2.6.1" + version: 2.6.2 + resolution: "@bufbuild/protoplugin@npm:2.6.2" dependencies: - "@bufbuild/protobuf": "npm:2.6.1" + "@bufbuild/protobuf": 
"npm:2.6.2" "@typescript/vfs": "npm:^1.5.2" typescript: "npm:5.4.5" - checksum: 10c0/bb1b79fa5915eae8f77787aa18c905ea8ede5198237b6f7e3780924306f2d3b8c4392e351e2efddf68d3db8e1f0bbae1afabb40328f4e5b4bfe37bceb91f380a + checksum: 10c0/9e7e011d8b53cad4c4d65a99db50422a79710eb804de84fca451cf85b0d8f7b30bcff6bd04ec32422a4a787c4d5d927a08cea6f71c8ceb2649d21c43640f4695 languageName: node linkType: hard @@ -597,7 +615,7 @@ __metadata: "@github/dependency-submission-toolkit": "npm:2.0.5" "@tsconfig/node24": "npm:24.0.1" "@tsconfig/strictest": "npm:2.0.5" - "@types/node": "npm:24.0.15" + "@types/node": "npm:24.1.0" esbuild: "npm:0.25.8" packageurl-js: "npm:2.0.1" strip-ansi: "npm:7.1.0" @@ -613,7 +631,7 @@ __metadata: "@actions/exec": "npm:1.1.1" "@tsconfig/node24": "npm:24.0.1" "@tsconfig/strictest": "npm:2.0.5" - "@types/node": "npm:24.0.15" + "@types/node": "npm:24.1.0" esbuild: "npm:0.25.8" typescript: "npm:5.8.3" languageName: unknown @@ -627,7 +645,7 @@ __metadata: "@actions/exec": "npm:1.1.1" "@tsconfig/node24": "npm:24.0.1" "@tsconfig/strictest": "npm:2.0.5" - "@types/node": "npm:24.0.15" + "@types/node": "npm:24.1.0" esbuild: "npm:0.25.8" typescript: "npm:5.8.3" languageName: unknown @@ -641,7 +659,7 @@ __metadata: "@actions/exec": "npm:1.1.1" "@tsconfig/node24": "npm:24.0.1" "@tsconfig/strictest": "npm:2.0.5" - "@types/node": "npm:24.0.15" + "@types/node": "npm:24.1.0" esbuild: "npm:0.25.8" typescript: "npm:5.8.3" languageName: unknown @@ -662,9 +680,9 @@ __metadata: "@octokit/plugin-retry": "npm:8.0.1" "@tsconfig/node24": "npm:24.0.1" "@tsconfig/strictest": "npm:2.0.5" - "@types/node": "npm:24.0.15" + "@types/node": "npm:24.1.0" "@types/semver": "npm:7.7.0" - cheerio: "npm:1.1.0" + cheerio: "npm:1.1.2" esbuild: "npm:0.25.8" exponential-backoff: "npm:3.1.2" semver: "npm:7.7.2" @@ -904,12 +922,12 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:24.0.15": - version: 24.0.15 - resolution: "@types/node@npm:24.0.15" +"@types/node@npm:24.1.0": + version: 24.1.0 + resolution: "@types/node@npm:24.1.0" dependencies: undici-types: "npm:~7.8.0" - checksum: 10c0/39ead0c0ff25dde29357630b5eaa7dd73cf3af796dbd0f01ed439a8af01cbddfa6b68aa9d67fb3243962836170a4463ff856c47fa822250c585987f707eb42b3 + checksum: 10c0/6c4686bc144f6ce7bffd4cadc3e1196e2217c1da4c639c637213719c8a3ee58b6c596b994befcbffeacd9d9eb0c3bff6529d2bc27da5d1cb9d58b1da0056f9f4 languageName: node linkType: hard @@ -1034,22 +1052,22 @@ __metadata: languageName: node linkType: hard -"cheerio@npm:1.1.0": - version: 1.1.0 - resolution: "cheerio@npm:1.1.0" +"cheerio@npm:1.1.2": + version: 1.1.2 + resolution: "cheerio@npm:1.1.2" dependencies: cheerio-select: "npm:^2.1.0" dom-serializer: "npm:^2.0.0" domhandler: "npm:^5.0.3" domutils: "npm:^3.2.2" - encoding-sniffer: "npm:^0.2.0" + encoding-sniffer: "npm:^0.2.1" htmlparser2: "npm:^10.0.0" parse5: "npm:^7.3.0" parse5-htmlparser2-tree-adapter: "npm:^7.1.0" parse5-parser-stream: "npm:^7.1.2" - undici: "npm:^7.10.0" + undici: "npm:^7.12.0" whatwg-mimetype: "npm:^4.0.0" - checksum: 10c0/f7b940a89e1fe77bf6b4fe3b993f17b02a358942cc0b9d3b55ea235a0bc322829dbc47151763ef9986fd237494c00380909af759e46582c72470a10643b85abd + checksum: 10c0/2c6d2274666fe122f54fdca457ee76453e1a993b19563acaa23eb565bf7776f0f01e4c3800092f00e84aa13c83a161f0cf000ac0a8332d1d7f2b2387d6ecc5fc languageName: node linkType: hard @@ -1164,7 +1182,7 @@ __metadata: languageName: node linkType: hard -"encoding-sniffer@npm:^0.2.0": +"encoding-sniffer@npm:^0.2.1": version: 0.2.1 resolution: "encoding-sniffer@npm:0.2.1" dependencies: @@ 
-1319,7 +1337,7 @@ __metadata: languageName: node linkType: hard -"events@npm:^3.0.0": +"events@npm:^3.0.0, events@npm:^3.3.0": version: 3.3.0 resolution: "events@npm:3.3.0" checksum: 10c0/d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6 @@ -1828,7 +1846,7 @@ __metadata: languageName: node linkType: hard -"undici@npm:^7.10.0": +"undici@npm:^7.12.0": version: 7.12.0 resolution: "undici@npm:7.12.0" checksum: 10c0/27aada7bd8d2588f146a2ba554f2f6b7593f9c2d14bf3797bbc7b74825d9bee38eb3ab61e2d0ee01bb827a9ac8017eb2991cb4f06d9ed9971c8a142d8306fbab