From 56337c425554a6be30cdef71bf441f15be286854 Mon Sep 17 00:00:00 2001 From: Deepak Dahiya <59823596+t-dedah@users.noreply.github.com> Date: Mon, 11 Apr 2022 19:06:18 +0530 Subject: [PATCH] Updated @actions/cache (#460) * Updated @actions/cache * Licensed cache * new build * Updated version --- .licenses/npm/@actions/cache.dep.yml | 2 +- .../core-asynciterator-polyfill.dep.yml | 40 +- .licenses/npm/@azure/core-http.dep.yml | 2 +- .licenses/npm/@azure/core-lro.dep.yml | 2 +- .licenses/npm/@azure/core-paging.dep.yml | 2 +- .licenses/npm/@azure/ms-rest-js.dep.yml | 2 +- .licenses/npm/@azure/storage-blob.dep.yml | 2 +- .licenses/npm/@opentelemetry/api.dep.yml | 6 +- .licenses/npm/@types/node-fetch.dep.yml | 2 +- dist/cache-save/index.js | 9022 ++++++++++------- dist/setup/index.js | 5204 ++++++---- package-lock.json | 125 +- package.json | 4 +- 13 files changed, 8621 insertions(+), 5794 deletions(-) diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml index 35b0a4bf..f7aa5837 100644 --- a/.licenses/npm/@actions/cache.dep.yml +++ b/.licenses/npm/@actions/cache.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/cache" -version: 2.0.0 +version: 2.0.2 type: npm summary: Actions cache lib homepage: https://github.com/actions/toolkit/tree/main/packages/cache diff --git a/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml b/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml index 6e36cb3e..ea530528 100644 --- a/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml +++ b/.licenses/npm/@azure/core-asynciterator-polyfill.dep.yml @@ -1,32 +1,32 @@ --- name: "@azure/core-asynciterator-polyfill" -version: 1.0.0 +version: 1.0.2 type: npm summary: Polyfill for IE/Node 8 for Symbol.asyncIterator -homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/core-asynciterator-polyfill +homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-asynciterator-polyfill/README.md license: mit licenses: - sources: LICENSE - text: |2 - MIT License + text: | + The MIT License (MIT) - Copyright (c) Microsoft Corporation. All rights reserved. + Copyright (c) 2020 Microsoft - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. notices: [] diff --git a/.licenses/npm/@azure/core-http.dep.yml b/.licenses/npm/@azure/core-http.dep.yml index 6a2bfc5b..411b834d 100644 --- a/.licenses/npm/@azure/core-http.dep.yml +++ b/.licenses/npm/@azure/core-http.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/core-http" -version: 2.2.2 +version: 2.2.4 type: npm summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest diff --git a/.licenses/npm/@azure/core-lro.dep.yml b/.licenses/npm/@azure/core-lro.dep.yml index d454075b..5f8c3da3 100644 --- a/.licenses/npm/@azure/core-lro.dep.yml +++ b/.licenses/npm/@azure/core-lro.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/core-lro" -version: 2.2.1 +version: 2.2.4 type: npm summary: Isomorphic client library for supporting long-running operations in node.js and browser. diff --git a/.licenses/npm/@azure/core-paging.dep.yml b/.licenses/npm/@azure/core-paging.dep.yml index 72d23ac0..6c805290 100644 --- a/.licenses/npm/@azure/core-paging.dep.yml +++ b/.licenses/npm/@azure/core-paging.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/core-paging" -version: 1.2.0 +version: 1.2.1 type: npm summary: Core types for paging async iterable iterators homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-paging/README.md diff --git a/.licenses/npm/@azure/ms-rest-js.dep.yml b/.licenses/npm/@azure/ms-rest-js.dep.yml index fb2116fc..4f729c7b 100644 --- a/.licenses/npm/@azure/ms-rest-js.dep.yml +++ b/.licenses/npm/@azure/ms-rest-js.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/ms-rest-js" -version: 2.6.0 +version: 2.6.1 type: npm summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest diff --git a/.licenses/npm/@azure/storage-blob.dep.yml b/.licenses/npm/@azure/storage-blob.dep.yml index 7b187d05..c364cf8e 100644 --- a/.licenses/npm/@azure/storage-blob.dep.yml +++ b/.licenses/npm/@azure/storage-blob.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/storage-blob" -version: 12.8.0 +version: 12.9.0 type: npm summary: Microsoft Azure Storage SDK for JavaScript - Blob homepage: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/ diff --git a/.licenses/npm/@opentelemetry/api.dep.yml b/.licenses/npm/@opentelemetry/api.dep.yml index 5674e022..98dcaf46 100644 --- a/.licenses/npm/@opentelemetry/api.dep.yml +++ b/.licenses/npm/@opentelemetry/api.dep.yml @@ -1,6 +1,6 @@ --- name: "@opentelemetry/api" -version: 1.0.3 +version: 1.0.4 type: npm summary: Public API for OpenTelemetry homepage: https://github.com/open-telemetry/opentelemetry-js-api#readme @@ -218,10 +218,6 @@ licenses: [discussions-url]: https://github.com/open-telemetry/opentelemetry-js/discussions [license-url]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/LICENSE 
[license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat - [dependencies-image]: https://status.david-dm.org/gh/open-telemetry/opentelemetry-js-api.svg - [dependencies-url]: https://david-dm.org/open-telemetry/opentelemetry-js-api - [devDependencies-image]: https://status.david-dm.org/gh/open-telemetry/opentelemetry-js-api.svg?type=dev - [devDependencies-url]: https://david-dm.org/open-telemetry/opentelemetry-js-api?type=dev [npm-url]: https://www.npmjs.com/package/@opentelemetry/api [npm-img]: https://badge.fury.io/js/%40opentelemetry%2Fapi.svg [docs-tracing]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/tracing.md diff --git a/.licenses/npm/@types/node-fetch.dep.yml b/.licenses/npm/@types/node-fetch.dep.yml index 094ab278..2580f942 100644 --- a/.licenses/npm/@types/node-fetch.dep.yml +++ b/.licenses/npm/@types/node-fetch.dep.yml @@ -1,6 +1,6 @@ --- name: "@types/node-fetch" -version: 2.5.12 +version: 2.6.1 type: npm summary: TypeScript definitions for node-fetch homepage: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index 5836c4a0..dc20be1a 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -1148,6 +1148,11 @@ function assertDefined(name, value) { return value; } exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -1617,7 +1622,7 @@ exports.default = _default; /* 91 */ /***/ (function(module, __unusedexports, __webpack_require__) { -var serialOrdered = __webpack_require__(892); +var serialOrdered = __webpack_require__(192); // Public API module.exports = serial; @@ -2593,7 +2598,19 @@ Object.defineProperty(exports, "__esModule", { value: true }); /***/ }), /* 96 */, -/* 97 */, +/* 97 */ +/***/ (function() { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +if (typeof Symbol === undefined || !Symbol.asyncIterator) { + Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +} +//# sourceMappingURL=index.js.map + +/***/ }), /* 98 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -3217,18 +3234,18 @@ function downloadCache(archiveLocation, archivePath, options) { exports.downloadCache = downloadCache; // Reserve Cache function reserveCache(key, paths, options) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const reserveCacheRequest = { key, - version + version, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? 
_b : -1; + return response; }); } exports.reserveCache = reserveCache; @@ -3365,14 +3382,15 @@ var DiagAPI = /** @class */ (function () { function DiagAPI() { function _logProxy(funcName) { return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } var logger = global_utils_1.getGlobal('diag'); // shortcut if logger not set if (!logger) return; - return logger[funcName].apply(logger, - // work around Function.prototype.apply types - // eslint-disable-next-line @typescript-eslint/no-explicit-any - arguments); + return logger[funcName].apply(logger, args); }; } // Using self local variable for minification purposes as 'this' cannot be minified @@ -4491,30 +4509,7 @@ exports.NoopTracerProvider = NoopTracerProvider; /***/ }), /* 163 */, /* 164 */, -/* 165 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=types.js.map - -/***/ }), +/* 165 */, /* 166 */, /* 167 */, /* 168 */, @@ -4767,7 +4762,87 @@ module.exports = v1; /***/ }), /* 190 */, /* 191 */, -/* 192 */, +/* 192 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var iterate = __webpack_require__(157) + , initState = __webpack_require__(903) + , terminator = __webpack_require__(939) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} + + +/***/ }), /* 193 */, /* 194 */, /* 195 */, @@ -5281,17 +5356,22 @@ var DiagConsoleLogger = /** @class */ (function () { function DiagConsoleLogger() { function _consoleFunc(funcName) { return function () { - var orgArguments = arguments; + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } if (console) { // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console var theFunc = console[funcName]; if (typeof theFunc !== 'function') { // Not all environments support all functions + // eslint-disable-next-line no-console theFunc = console.log; } // One last final check if (typeof theFunc === 'function') { - return theFunc.apply(console, orgArguments); + return theFunc.apply(console, args); } } }; @@ -6005,7 +6085,7 @@ exports.implementation = class URLImpl { this.children = []; this.baseURI = null; if (!XMLElement) { - XMLElement = __webpack_require__(845); + XMLElement = __webpack_require__(701); XMLCData = __webpack_require__(657); XMLComment = __webpack_require__(919); XMLDeclaration = __webpack_require__(738); @@ -8956,7 +9036,7 @@ function expand(str, isTop) { XMLDocumentCB = __webpack_require__(768); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); XMLStreamWriter = __webpack_require__(458); @@ -9040,7 +9120,7 @@ module.exports = { parallel : __webpack_require__(424), serial : __webpack_require__(91), - serialOrdered : __webpack_require__(892) + serialOrdered : __webpack_require__(192) }; @@ -9048,17 +9128,7 @@ module.exports = /* 335 */, /* 336 */, /* 337 */, -/* 338 */ -/***/ (function() { - -"use strict"; - -if (typeof Symbol === undefined || !Symbol.asyncIterator) { - Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); -} -//# sourceMappingURL=index.js.map - -/***/ }), +/* 338 */, /* 339 */, /* 340 */ /***/ (function(__unusedmodule, exports) { @@ -9113,47 +9183,7 @@ var SamplingDecision; /* 344 */, /* 345 */, /* 346 */, -/* 347 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Generated by CoffeeScript 1.12.7 -(function() { - var XMLStringWriter, XMLWriterBase, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; - - XMLWriterBase = __webpack_require__(423); - - module.exports = XMLStringWriter = (function(superClass) { - extend(XMLStringWriter, superClass); - - function XMLStringWriter(options) { - XMLStringWriter.__super__.constructor.call(this, options); - } - - XMLStringWriter.prototype.document = function(doc, options) { - var child, i, len, r, ref; - options = this.filterOptions(options); - r = ''; - ref = doc.children; - for (i = 0, len = ref.length; i < len; i++) { - child = ref[i]; - r += this.writeChildNode(child, options, 0); - } - if (options.pretty && r.slice(-options.newline.length) === options.newline) { - r = r.slice(0, -options.newline.length); - } - return r; - }; - - return XMLStringWriter; - - })(XMLWriterBase); - -}).call(this); - - -/***/ }), +/* 
347 */, /* 348 */, /* 349 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -9526,6 +9556,29 @@ var events = __webpack_require__(614); var fs = __webpack_require__(747); var util = __webpack_require__(669); +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. @@ -10480,10 +10533,10 @@ const BlobItemInternal = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } }, deleted: { @@ -10558,6 +10611,30 @@ const BlobItemInternal = { } } }; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", @@ -11001,10 +11078,10 @@ const BlobPrefix = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } } } @@ -12627,6 +12704,59 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { @@ -15135,6 +15265,13 @@ const BlobCopyFromURLHeaders = { name: "ByteArray" } }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -17652,6 +17789,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, BlobFlatListSegment: BlobFlatListSegment, 
BlobItemInternal: BlobItemInternal, + BlobName: BlobName, BlobPropertiesInternal: BlobPropertiesInternal, ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, BlobHierarchyListSegment: BlobHierarchyListSegment, @@ -17703,6 +17841,8 @@ var Mappers = /*#__PURE__*/Object.freeze({ ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, @@ -17890,7 +18030,7 @@ const timeoutInSeconds = { const version = { parameterPath: "version", mapper: { - defaultValue: "2020-10-02", + defaultValue: "2021-04-10", isConstant: true, serializedName: "x-ms-version", type: { @@ -17985,7 +18125,7 @@ const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted"] + allowedValues: ["metadata", "deleted", "system"] } } } @@ -18507,11 +18647,10 @@ const encryptionKeySha256 = { } }; const encryptionAlgorithm = { - parameterPath: ["options", "encryptionAlgorithm"], + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], mapper: { - defaultValue: "AES256", - isConstant: true, serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", type: { name: "String" } @@ -19428,7 +19567,7 @@ class Service { setProperties(blobServiceProperties, options) { const operationArguments = { blobServiceProperties, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); } @@ -19439,9 +19578,9 @@ class Service { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. 
It is only available on the @@ -19451,7 +19590,7 @@ class Service { */ getStatistics(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); } @@ -19461,7 +19600,7 @@ class Service { */ listContainersSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); } @@ -19474,7 +19613,7 @@ class Service { getUserDelegationKey(keyInfo, options) { const operationArguments = { keyInfo, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); } @@ -19484,9 +19623,9 @@ class Service { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -19501,9 +19640,9 @@ class Service { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -19513,13 +19652,13 @@ class Service { */ filterBlobs(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", @@ -19548,9 +19687,9 @@ const setPropertiesOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getPropertiesOperationSpec = { +const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -19575,7 +19714,7 @@ const getPropertiesOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getStatisticsOperationSpec = { path: "/", @@ -19602,7 +19741,7 @@ const getStatisticsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const 
listContainersSegmentOperationSpec = { path: "/", @@ -19632,7 +19771,7 @@ const listContainersSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -19663,9 +19802,9 @@ const getUserDelegationKeyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getAccountInfoOperationSpec = { +const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -19681,9 +19820,9 @@ const getAccountInfoOperationSpec = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const submitBatchOperationSpec = { +const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { @@ -19713,9 +19852,9 @@ const submitBatchOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const filterBlobsOperationSpec = { +const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { @@ -19742,7 +19881,7 @@ const filterBlobsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; /* @@ -19768,9 +19907,9 @@ class Container { */ create(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -19779,7 +19918,7 @@ class Container { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); } @@ -19790,9 +19929,9 @@ class Container { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. @@ -19800,9 +19939,9 @@ class Container { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. 
The permissions indicate whether container data @@ -19811,7 +19950,7 @@ class Container { */ getAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); } @@ -19822,7 +19961,7 @@ class Container { */ setAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); } @@ -19832,7 +19971,7 @@ class Container { */ restore(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); } @@ -19844,7 +19983,7 @@ class Container { rename(sourceContainerName, options) { const operationArguments = { sourceContainerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); } @@ -19861,9 +20000,20 @@ class Container { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -19872,9 +20022,9 @@ class Container { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -19885,9 +20035,9 @@ class Container { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -19898,9 +20048,9 @@ class Container { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -19909,9 +20059,9 @@ class Container { */ breakLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -19926,9 +20076,9 @@ class Container { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -19936,7 +20086,7 @@ class Container { */ listBlobFlatSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); } @@ -19951,7 +20101,7 @@ class Container { listBlobHierarchySegment(delimiter, options) { const operationArguments = { delimiter, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); } @@ -19961,14 +20111,14 @@ class Container { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -19992,7 +20142,7 @@ const createOperationSpec = { preventEncryptionScopeOverride ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", @@ -20015,9 +20165,9 @@ const getPropertiesOperationSpec$1 = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const deleteOperationSpec = { +const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: 
"DELETE", responses: { @@ -20040,9 +20190,9 @@ const deleteOperationSpec = { ifUnmodifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const setMetadataOperationSpec = { +const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20069,7 +20219,7 @@ const setMetadataOperationSpec = { ifModifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -20108,7 +20258,7 @@ const getAccessPolicyOperationSpec = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const setAccessPolicyOperationSpec = { path: "/{containerName}", @@ -20142,7 +20292,7 @@ const setAccessPolicyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const restoreOperationSpec = { path: "/{containerName}", @@ -20170,7 +20320,7 @@ const restoreOperationSpec = { deletedContainerVersion ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const renameOperationSpec = { path: "/{containerName}", @@ -20198,9 +20348,9 @@ const renameOperationSpec = { sourceLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const submitBatchOperationSpec$1 = { +const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -20234,9 +20384,39 @@ const submitBatchOperationSpec$1 = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const acquireLeaseOperationSpec = { +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20265,9 +20445,9 @@ const acquireLeaseOperationSpec = { proposedLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const releaseLeaseOperationSpec = { +const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20295,9 +20475,9 @@ const releaseLeaseOperationSpec = { leaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const renewLeaseOperationSpec = { +const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20325,9 +20505,9 @@ const renewLeaseOperationSpec = { action2 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const breakLeaseOperationSpec = { +const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20355,9 +20535,9 @@ const breakLeaseOperationSpec = { breakPeriod ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const changeLeaseOperationSpec = { +const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -20386,7 +20566,7 @@ const changeLeaseOperationSpec = { 
proposedLeaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -20417,7 +20597,7 @@ const listBlobFlatSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -20449,7 +20629,7 @@ const listBlobHierarchySegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", @@ -20467,7 +20647,7 @@ const getAccountInfoOperationSpec$1 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; /* @@ -20493,7 +20673,7 @@ class Blob$1 { */ download(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); } @@ -20504,9 +20684,9 @@ class Blob$1 { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -20525,9 +20705,9 @@ class Blob$1 { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted @@ -20535,7 +20715,7 @@ class Blob$1 { */ undelete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); } @@ -20547,7 +20727,7 @@ class Blob$1 { setExpiry(expiryOptions, options) { const operationArguments = { expiryOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); } @@ -20557,7 +20737,7 @@ class Blob$1 { */ setHttpHeaders(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); } @@ -20567,7 +20747,7 @@ class Blob$1 { */ setImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, 
setImmutabilityPolicyOperationSpec); } @@ -20577,7 +20757,7 @@ class Blob$1 { */ deleteImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); } @@ -20589,7 +20769,7 @@ class Blob$1 { setLegalHold(legalHold, options) { const operationArguments = { legalHold, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); } @@ -20600,9 +20780,9 @@ class Blob$1 { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -20611,9 +20791,9 @@ class Blob$1 { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -20624,9 +20804,9 @@ class Blob$1 { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -20637,9 +20817,9 @@ class Blob$1 { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -20654,9 +20834,9 @@ class Blob$1 { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -20665,9 +20845,9 @@ class Blob$1 { */ breakLease(options) { const 
operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob @@ -20675,7 +20855,7 @@ class Blob$1 { */ createSnapshot(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } @@ -20690,7 +20870,7 @@ class Blob$1 { startCopyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } @@ -20706,7 +20886,7 @@ class Blob$1 { copyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); } @@ -20720,7 +20900,7 @@ class Blob$1 { abortCopyFromURL(copyId, options) { const operationArguments = { copyId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); } @@ -20736,7 +20916,7 @@ class Blob$1 { setTier(tier, options) { const operationArguments = { tier, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); } @@ -20746,9 +20926,9 @@ class Blob$1 { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -20757,7 +20937,7 @@ class Blob$1 { */ query(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } @@ -20767,7 +20947,7 @@ class Blob$1 { */ getTags(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); } @@ -20777,13 +20957,13 @@ class Blob$1 { */ setTags(options) { const operationArguments = { - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -20831,9 +21011,9 @@ const downloadOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getPropertiesOperationSpec$2 = { +const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -20866,9 +21046,9 @@ const getPropertiesOperationSpec$2 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const deleteOperationSpec$1 = { +const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -20900,7 +21080,7 @@ const deleteOperationSpec$1 = { deleteSnapshots ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", @@ -20922,7 +21102,7 @@ const undeleteOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", @@ -20946,7 +21126,7 @@ const setExpiryOperationSpec = { expiresOn ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", @@ -20980,7 +21160,7 @@ const setHttpHeadersOperationSpec = { blobContentDisposition ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -21005,7 +21185,7 @@ const setImmutabilityPolicyOperationSpec = { immutabilityPolicyMode ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -21027,7 +21207,7 @@ const deleteImmutabilityPolicyOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", @@ -21050,9 +21230,9 @@ const setLegalHoldOperationSpec = { legalHold ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const setMetadataOperationSpec$1 = { +const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21083,9 +21263,9 @@ const setMetadataOperationSpec$1 = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const acquireLeaseOperationSpec$1 = { +const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21113,9 +21293,9 @@ const acquireLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const releaseLeaseOperationSpec$1 = { +const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21142,9 +21322,9 @@ const releaseLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const renewLeaseOperationSpec$1 = { +const renewLeaseOperationSpec = { path: 
"/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21171,9 +21351,9 @@ const renewLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const changeLeaseOperationSpec$1 = { +const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21201,9 +21381,9 @@ const changeLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const breakLeaseOperationSpec$1 = { +const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -21230,7 +21410,7 @@ const breakLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", @@ -21263,7 +21443,7 @@ const createSnapshotOperationSpec = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -21305,7 +21485,7 @@ const startCopyFromURLOperationSpec = { legalHold1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -21334,6 +21514,7 @@ const copyFromURLOperationSpec = { ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, + encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, @@ -21347,7 +21528,7 @@ const copyFromURLOperationSpec = { copySourceAuthorization ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -21375,7 +21556,7 @@ const abortCopyFromURLOperationSpec = { copyActionAbortConstant ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTierOperationSpec = { path: "/{containerName}/{blob}", @@ -21409,9 +21590,9 @@ const setTierOperationSpec = { tier1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getAccountInfoOperationSpec$2 = { +const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -21427,7 +21608,7 @@ const getAccountInfoOperationSpec$2 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -21477,7 +21658,7 @@ const queryOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -21507,7 +21688,7 @@ const getTagsOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -21541,7 +21722,7 @@ const setTagsOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; /* @@ -21571,7 +21752,7 @@ class PageBlob { const operationArguments = { contentLength, blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); } @@ -21585,7 +21766,7 @@ class PageBlob { 
const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); } @@ -21597,7 +21778,7 @@ class PageBlob { clearPages(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); } @@ -21618,7 +21799,7 @@ class PageBlob { sourceRange, contentLength, range, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); } @@ -21629,7 +21810,7 @@ class PageBlob { */ getPageRanges(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); } @@ -21640,7 +21821,7 @@ class PageBlob { */ getPageRangesDiff(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); } @@ -21653,7 +21834,7 @@ class PageBlob { resize(blobContentLength, options) { const operationArguments = { blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); } @@ -21667,7 +21848,7 @@ class PageBlob { updateSequenceNumber(sequenceNumberAction, options) { const operationArguments = { sequenceNumberAction, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); } @@ -21686,14 +21867,14 @@ class PageBlob { copyIncremental(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -21740,7 +21921,7 @@ const createOperationSpec$1 = { blobSequenceNumber ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -21782,7 +21963,7 @@ const 
uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer$2 }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -21820,7 +22001,7 @@ const clearPagesOperationSpec = { pageWrite1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -21867,7 +22048,7 @@ const uploadPagesFromURLOperationSpec = { range1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -21901,7 +22082,7 @@ const getPageRangesOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -21937,7 +22118,7 @@ const getPageRangesDiffOperationSpec = { prevSnapshotUrl ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const resizeOperationSpec = { path: "/{containerName}/{blob}", @@ -21970,7 +22151,7 @@ const resizeOperationSpec = { blobContentLength ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", @@ -22000,7 +22181,7 @@ const updateSequenceNumberOperationSpec = { sequenceNumberAction ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", @@ -22028,7 +22209,7 @@ const copyIncrementalOperationSpec = { copySource ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; /* @@ -22055,9 +22236,9 @@ class AppendBlob { create(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. 
The @@ -22071,7 +22252,7 @@ class AppendBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); } @@ -22088,7 +22269,7 @@ class AppendBlob { const operationArguments = { sourceUrl, contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); } @@ -22099,15 +22280,15 @@ class AppendBlob { */ seal(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -22150,7 +22331,7 @@ const createOperationSpec$2 = { blobType1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -22234,7 +22415,7 @@ const appendBlockFromUrlOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const sealOperationSpec = { path: "/{containerName}/{blob}", @@ -22262,7 +22443,7 @@ const sealOperationSpec = { appendPosition ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; /* @@ -22294,7 +22475,7 @@ class BlockBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); } @@ -22315,7 +22496,7 @@ class BlockBlob { const operationArguments = { contentLength, copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); } @@ -22333,7 +22514,7 @@ class BlockBlob { blockId, contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); } @@ -22352,7 +22533,7 @@ class BlockBlob { blockId, contentLength, sourceUrl, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); } @@ -22370,7 +22551,7 @@ class BlockBlob { commitBlockList(blocks, options) { const operationArguments = { blocks, - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); } @@ -22384,14 +22565,14 @@ class BlockBlob { getBlockList(listType, options) { const operationArguments = { listType, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -22439,7 +22620,7 @@ const uploadOperationSpec = { blobType2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", @@ -22492,7 +22673,7 @@ const putBlobFromUrlOperationSpec = { copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -22528,7 +22709,7 @@ const stageBlockOperationSpec = { accept2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -22569,7 +22750,7 @@ const stageBlockFromURLOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -22619,7 +22800,7 @@ const commitBlockListOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -22649,7 +22830,7 @@ const getBlockListOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; // Copyright (c) Microsoft Corporation. @@ -22660,8 +22841,8 @@ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
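The generated BlockBlob, PageBlob and AppendBlob operation groups above all follow one pattern: normalize the caller's options with operationOptionsToRequestOptionsBase, bundle them with the method arguments, and hand both to the service client together with a per-operation spec (path, httpMethod, serializer). The sketch below illustrates that dispatch shape with a stubbed client; FakeServiceClient and uploadSpecSketch are hypothetical stand-ins, not part of @azure/core-http or this bundle.

// Illustrative stub only -- mimics the operationArguments + OperationSpec dispatch used above.
class FakeServiceClient {
  sendOperationRequest(operationArguments, operationSpec) {
    // The real coreHttp.ServiceClient builds the HTTP request from the spec
    // (URL template, method, headers, XML/JSON serializer) and the arguments.
    return Promise.resolve({
      method: operationSpec.httpMethod,
      path: operationSpec.path,
      isXML: Boolean(operationSpec.isXML),
      args: Object.keys(operationArguments),
    });
  }
}

// Shape of a spec, loosely modeled on uploadOperationSpec above (values abbreviated).
const uploadSpecSketch = {
  path: "/{containerName}/{blob}",
  httpMethod: "PUT",
  isXML: false,
  mediaType: "binary",
};

async function demo() {
  const client = new FakeServiceClient();
  const operationArguments = {
    contentLength: 11,
    body: "hello world",
    options: {}, // the generated code passes operationOptionsToRequestOptionsBase(options || {})
  };
  console.log(await client.sendOperationRequest(operationArguments, uploadSpecSketch));
}

demo();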
-const SDK_VERSION = "12.8.0"; -const SERVICE_VERSION = "2020-10-02"; +const SDK_VERSION = "12.9.0"; +const SERVICE_VERSION = "2021-04-10"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; @@ -22678,15 +22859,15 @@ const URLConstants = { SIGNATURE: "sig", SNAPSHOT: "snapshot", VERSIONID: "versionid", - TIMEOUT: "timeout" - } + TIMEOUT: "timeout", + }, }; const HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 + HTTP_RANGE_NOT_SATISFIABLE: 416, }; const HeaderConstants = { AUTHORIZATION: "Authorization", @@ -22711,7 +22892,7 @@ const HeaderConstants = { X_MS_COPY_SOURCE: "x-ms-copy-source", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" + X_MS_VERSION: "x-ms-version", }; const ETagNone = ""; const ETagAny = "*"; @@ -22816,7 +22997,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-if-tags", - "x-ms-source-if-tags" + "x-ms-source-if-tags", ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", @@ -22851,8 +23032,9 @@ const StorageBlobLoggingAllowedQueryParameters = [ "skt", "sktid", "skv", - "snapshot" + "snapshot", ]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; // Copyright (c) Microsoft Corporation. /** @@ -22992,7 +23174,7 @@ function extractConnectionStringParts(connectionString) { url: blobEndpoint, accountName, accountKey, - proxyUri + proxyUri, }; } else { @@ -23324,14 +23506,14 @@ function toBlobTags(tags) { return undefined; } const res = { - blobTagSet: [] + blobTagSet: [], }; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; res.blobTagSet.push({ key, - value + value, }); } } @@ -23371,33 +23553,33 @@ function toQuerySerialization(textConfiguration) { fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } + headersPresent: textConfiguration.hasHeaders || false, + }, + }, }; case "json": return { format: { type: "json", jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } + recordSeparator: textConfiguration.recordSeparator, + }, + }, }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { - schema: textConfiguration.schema - } - } + schema: textConfiguration.schema, + }, + }, }; case "parquet": return { format: { - type: "parquet" - } + type: "parquet", + }, }; default: throw Error("Invalid BlobQueryTextConfiguration."); @@ -23421,7 +23603,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { } const rule = { ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] + replicationStatus: objectReplicationRecord[key], }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) { @@ -23430,7 +23612,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { else { orProperties.push({ policyId: ids[0], - rules: [rule] + rules: [rule], }); } } @@ -23449,6 +23631,202 @@ function attachCredential(thing, credential) { function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? 
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; } +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobItem(blobInXML) { + const blobPropertiesInXML = blobInXML["Properties"]; + const blobProperties = { + createdOn: new Date(blobPropertiesInXML["Creation-Time"]), + lastModified: new Date(blobPropertiesInXML["Last-Modified"]), + etag: blobPropertiesInXML["Etag"], + contentLength: blobPropertiesInXML["Content-Length"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["Content-Length"]), + contentType: blobPropertiesInXML["Content-Type"], + contentEncoding: blobPropertiesInXML["Content-Encoding"], + contentLanguage: blobPropertiesInXML["Content-Language"], + contentMD5: decodeBase64String(blobPropertiesInXML["Content-MD5"]), + contentDisposition: blobPropertiesInXML["Content-Disposition"], + cacheControl: blobPropertiesInXML["Cache-Control"], + blobSequenceNumber: blobPropertiesInXML["x-ms-blob-sequence-number"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]), + blobType: blobPropertiesInXML["BlobType"], + leaseStatus: blobPropertiesInXML["LeaseStatus"], + leaseState: blobPropertiesInXML["LeaseState"], + leaseDuration: blobPropertiesInXML["LeaseDuration"], + copyId: blobPropertiesInXML["CopyId"], + copyStatus: blobPropertiesInXML["CopyStatus"], + copySource: blobPropertiesInXML["CopySource"], + copyProgress: blobPropertiesInXML["CopyProgress"], + copyCompletedOn: blobPropertiesInXML["CopyCompletionTime"] === undefined + ? 
undefined + : new Date(blobPropertiesInXML["CopyCompletionTime"]), + copyStatusDescription: blobPropertiesInXML["CopyStatusDescription"], + serverEncrypted: ParseBoolean(blobPropertiesInXML["ServerEncrypted"]), + incrementalCopy: ParseBoolean(blobPropertiesInXML["IncrementalCopy"]), + destinationSnapshot: blobPropertiesInXML["DestinationSnapshot"], + deletedOn: blobPropertiesInXML["DeletedTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["DeletedTime"]), + remainingRetentionDays: blobPropertiesInXML["RemainingRetentionDays"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["RemainingRetentionDays"]), + accessTier: blobPropertiesInXML["AccessTier"], + accessTierInferred: ParseBoolean(blobPropertiesInXML["AccessTierInferred"]), + archiveStatus: blobPropertiesInXML["ArchiveStatus"], + customerProvidedKeySha256: blobPropertiesInXML["CustomerProvidedKeySha256"], + encryptionScope: blobPropertiesInXML["EncryptionScope"], + accessTierChangedOn: blobPropertiesInXML["AccessTierChangeTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["AccessTierChangeTime"]), + tagCount: blobPropertiesInXML["TagCount"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["TagCount"]), + expiresOn: blobPropertiesInXML["Expiry-Time"] === undefined + ? undefined + : new Date(blobPropertiesInXML["Expiry-Time"]), + isSealed: ParseBoolean(blobPropertiesInXML["Sealed"]), + rehydratePriority: blobPropertiesInXML["RehydratePriority"], + lastAccessedOn: blobPropertiesInXML["LastAccessTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["LastAccessTime"]), + immutabilityPolicyExpiresOn: blobPropertiesInXML["ImmutabilityPolicyUntilDate"] === undefined + ? undefined + : new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]), + immutabilityPolicyMode: blobPropertiesInXML["ImmutabilityPolicyMode"], + legalHold: ParseBoolean(blobPropertiesInXML["LegalHold"]), + }; + return { + name: ParseBlobName(blobInXML["Name"]), + deleted: ParseBoolean(blobInXML["Deleted"]), + snapshot: blobInXML["Snapshot"], + versionId: blobInXML["VersionId"], + isCurrentVersion: ParseBoolean(blobInXML["IsCurrentVersion"]), + properties: blobProperties, + metadata: blobInXML["Metadata"], + blobTags: ParseBlobTags(blobInXML["Tags"]), + objectReplicationMetadata: blobInXML["OrMetadata"], + hasVersionsOnly: ParseBoolean(blobInXML["HasVersionsOnly"]), + }; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML instanceof 
Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} // Copyright (c) Microsoft Corporation. /** @@ -23479,9 +23857,16 @@ class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { return this._nextPolicy.sendRequest(request); } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } } @@ -23502,6 +23887,10 @@ class StorageBrowserPolicyFactory { } // Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. @@ -23519,7 +23908,7 @@ const DEFAULT_RETRY_OPTIONS = { retryDelayInMs: 4 * 1000, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", - tryTimeoutInMs: undefined // Use server side default timeout strategy + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** @@ -23556,7 +23945,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, }; } /** @@ -23633,7 +24022,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "ENOTFOUND", "TIMEOUT", "EPIPE", - "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js ]; if (err) { for (const retriableError of retriableErrors) { @@ -23819,7 +24208,7 @@ class TelemetryPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { if (!request.headers) { request.headers = new coreHttp.HttpHeaders(); } @@ -23842,7 +24231,7 @@ class TelemetryPolicyFactory { */ constructor(telemetry) { const userAgentInfo = []; - { + if (coreHttp.isNode) { if (telemetry) { const telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { @@ -23855,7 +24244,7 @@ class TelemetryPolicyFactory { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + const runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -23879,6 +24268,247 @@ function getCachedDefaultHttpClient() { return _defaultHttpClient; } +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. 
+ */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
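parseChallenge above turns a WWW-Authenticate value such as `Bearer authorization_uri=... resource_id=...` into a plain object, and the policy that follows derives the retry scope (resource_id + "/.default") and the tenant id (second path segment of authorization_uri) from it. The worked example below re-states that parsing with the same string operations; the challenge text is made up, and it uses the built-in URL class where the bundle uses coreHttp.URLBuilder.

// Re-statement of the parsing steps above, for illustration only (sample challenge is made up).
function parseChallengeSketch(challenge) {
  const bearerChallenge = challenge.slice("Bearer ".length);
  const parts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x);
  return parts
    .map((kv) => {
      const [key, value] = kv.trim().split("=");
      return { [key]: value };
    })
    .reduce((a, b) => ({ ...a, ...b }), {});
}

const challenge =
  "Bearer authorization_uri=https://login.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/oauth2/authorize resource_id=https://storage.azure.com";

const info = parseChallengeSketch(challenge);
// { authorization_uri: 'https://login.microsoftonline.com/.../oauth2/authorize',
//   resource_id: 'https://storage.azure.com' }

const challengeScopes = info.resource_id + "/.default"; // https://storage.azure.com/.default
const tenantId = new URL(info.authorization_uri).pathname.split("/")[1]; // 72f988bf-86f1-41af-91ab-2d7cd011db47
console.log({ challengeScopes, tenantId });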
+ */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + // Copyright (c) Microsoft Corporation. /** * A helper to decide if a given argument satisfies the Pipeline contract @@ -23924,7 +24554,7 @@ class Pipeline { toServiceClientOptions() { return { httpClient: this.options.httpClient, - requestPolicyFactories: this.factories + requestPolicyFactories: this.factories, }; } } @@ -23936,6 +24566,7 @@ class Pipeline { * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { + var _a; if (credential === undefined) { credential = new AnonymousCredential(); } @@ -23957,16 +24588,16 @@ function newPipeline(credential, pipelineOptions = {}) { coreHttp.logPolicy({ logger: logger.info, allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters - }) + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), ]; - { + if (coreHttp.isNode) { // policies only available in Node.js runtime, not in browsers factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); factories.push(coreHttp.disableResponseDecompressionPolicy()); } factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) : credential); return new Pipeline(factories, pipelineOptions); } @@ -23993,7 +24624,9 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && typeof request.body === "string" && request.body.length > 0) { + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ @@ -24008,7 +24641,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + @@ -24137,9 +24770,7 @@ class StorageSharedKeyCredential extends Credential { * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } @@ -24151,8 +24782,8 @@ class StorageSharedKeyCredential extends Credential { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const packageName = "azure-storage-blob"; -const packageVersion = "12.8.0"; -class StorageClientContext extends coreHttp.ServiceClient { +const packageVersion = "12.9.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the target of the desired @@ -24168,7 +24799,7 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } super(undefined, options); @@ -24177,7 +24808,7 @@ class StorageClientContext extends coreHttp.ServiceClient { // Parameter assignments this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-10-02"; + this.version = options.version || "2021-04-10"; } } @@ -24224,7 +24855,7 @@ class StorageClient { */ const createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage" + namespace: "Microsoft.Storage", }); /** * @internal @@ -24238,7 +24869,7 @@ function convertTracingToRequestOptionsBase(options) { return { // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, }; } @@ -24295,6 +24926,10 @@ class BlobSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an @@ -24336,6 +24971,9 @@ class BlobSASPermissions { case "i": blobSASPermissions.setImmutabilityPolicy = true; break; + case "y": + blobSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission: ${char}`); } @@ -24380,6 +25018,9 @@ class BlobSASPermissions { if (permissionLike.setImmutabilityPolicy) { blobSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } return blobSASPermissions; } /** @@ -24420,6 +25061,9 @@ class BlobSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -24479,6 +25123,14 @@ class ContainerSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; } /** * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an @@ -24523,6 +25175,12 @@ class ContainerSASPermissions { case "i": containerSASPermissions.setImmutabilityPolicy = true; break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; default: throw new RangeError(`Invalid permission ${char}`); } @@ -24570,6 +25228,12 @@ class ContainerSASPermissions { if (permissionLike.setImmutabilityPolicy) { containerSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } return containerSASPermissions; } /** @@ -24615,6 +25279,12 @@ class ContainerSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } return permissions.join(""); } } @@ -24644,9 +25314,7 @@ class UserDelegationKeyCredential { */ computeHMACSHA256(stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } } @@ -24664,6 +25332,10 @@ function ipRangeToString(ipRange) { } // Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; (function (SASProtocol) { /** * Protocol that allows HTTPS only @@ -24684,7 +25356,7 @@ function ipRangeToString(ipRange) { * NOTE: Instances of this class are immutable. 
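The two permission classes above map single characters to boolean flags ('y' for permanentDelete on both, 'f' for filterByTags on containers) and rebuild the string in a fixed order via toString. A stripped-down version of that round trip is sketched below with only a handful of flags; it is a simplified illustration, not the SDK's BlobSASPermissions/ContainerSASPermissions API.

// Simplified sketch of the parse/toString pattern above (subset of flags only).
function parsePermissionsSketch(permissions) {
  const flags = { read: false, delete: false, permanentDelete: false, filterByTags: false };
  for (const char of permissions) {
    switch (char) {
      case "r": flags.read = true; break;
      case "d": flags.delete = true; break;
      case "y": flags.permanentDelete = true; break; // new in this SDK version
      case "f": flags.filterByTags = true; break;    // container-level only in the real class
      default: throw new RangeError(`Invalid permission: ${char}`);
    }
  }
  return flags;
}

function permissionsToStringSketch(flags) {
  // Rebuilding in a fixed order is what gives a canonical string for signing.
  const out = [];
  if (flags.read) out.push("r");
  if (flags.delete) out.push("d");
  if (flags.permanentDelete) out.push("y");
  if (flags.filterByTags) out.push("f");
  return out.join("");
}

console.log(permissionsToStringSketch(parsePermissionsSketch("fdr"))); // "rdf" -- canonical order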
*/ class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -24697,6 +25369,7 @@ class SASQueryParameters { this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; @@ -24722,6 +25395,7 @@ class SASQueryParameters { this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; @@ -24750,7 +25424,7 @@ class SASQueryParameters { if (this.ipRangeInner) { return { end: this.ipRangeInner.end, - start: this.ipRangeInner.start + start: this.ipRangeInner.start, }; } return undefined; @@ -24769,6 +25443,7 @@ class SASQueryParameters { "se", "sip", "si", + "ses", "skoid", "sktid", "skt", @@ -24784,7 +25459,7 @@ class SASQueryParameters { "rscl", "rsct", "saoid", - "scid" + "scid", ]; const queries = []; for (const param of params) { @@ -24813,6 +25488,9 @@ class SASQueryParameters { case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; case "skoid": // Signed object ID this.tryAppendQueryParameter(queries, param, this.signedOid); break; @@ -24897,6 +25575,15 @@ function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredent if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } // Version 2019-12-12 adds support for the blob tags permission. // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string @@ -24978,7 +25665,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? 
blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); @@ -25047,11 +25734,81 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. 
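generateBlobSASQueryParameters20201206 above joins the listed fields with "\n" (permissions, start, expiry, canonical name, identifier, IP range, protocol, version, resource, timestamp, encryptionScope, then the five content headers) and signs the result with the account key, using the same HMAC-SHA256/base64 computation as computeHMACSHA256 earlier in this bundle. A minimal sketch of that signing step follows; the account key and field values are placeholders, and note that the real code also rejects encryptionScope when the requested version is older than 2020-12-06 (see SASSignatureValuesSanityCheckAndAutofill further down).

// Signing sketch only -- placeholder key and values, not a usable SAS.
const crypto = require("crypto");

function computeHMACSHA256Sketch(accountKeyBase64, stringToSign) {
  // Mirrors computeHMACSHA256 above; assumes the shared key is base64-encoded.
  const key = Buffer.from(accountKeyBase64, "base64");
  return crypto.createHmac("sha256", key).update(stringToSign, "utf8").digest("base64");
}

const stringToSign = [
  "r",                                  // verified permissions
  "",                                   // startsOn (not set)
  "2024-01-01T00:00:00Z",               // expiresOn, truncated ISO 8601
  "/blob/myaccount/mycontainer/myblob", // canonical name
  "",                                   // identifier
  "",                                   // ipRange
  "https",                              // protocol
  "2021-04-10",                         // version
  "b",                                  // resource: blob
  "",                                   // timestamp (snapshot / versionId)
  "",                                   // encryptionScope
  "", "", "", "", "",                   // cache-control .. content-type
].join("\n");

const sig = computeHMACSHA256Sketch("cGxhY2Vob2xkZXIta2V5", stringToSign);
console.log(encodeURIComponent(sig)); // value that would be carried in the "sig" query parameter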
@@ -25123,7 +25880,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); @@ -25202,11 +25959,91 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" @@ -25240,6 +26077,11 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -25250,10 +26092,18 @@ function 
SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } blobSASSignatureValues.version = version; return blobSASSignatureValues; } @@ -25327,7 +26177,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -25362,7 +26212,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -25395,7 +26245,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -25427,7 +26277,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -25462,7 +26312,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -25542,8 +26392,7 @@ class RetriableReadableStream extends stream.Readable { }); } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } else { @@ -26388,7 +27237,7 @@ class AvroReader { } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); if (!arraysEqual(header, AVRO_INIT_BYTES)) { throw new Error("Stream is not an Avro file."); @@ -26396,7 +27245,7 @@ class AvroReader { // File metadata is written as if defined by the following map schema: // { "type": "map", "values": "bytes"} this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Validate codec const codec = this._metadata[AVRO_CODEC_KEY]; @@ -26405,7 +27254,7 @@ class AvroReader { } // The 16-byte, randomly-generated sync marker for this file. 
this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Parse the schema const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); @@ -26414,7 +27263,7 @@ class AvroReader { this._blockOffset = this._initialBlockOffset + this._dataStream.position; } this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // skip block length await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); @@ -26436,13 +27285,13 @@ class AvroReader { } while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock == 0) { const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; @@ -26451,7 +27300,7 @@ class AvroReader { } try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); } catch (err) { @@ -26650,7 +27499,7 @@ class BlobQuickQueryStream extends stream.Readable { position, name, isFatal: fatal, - description + description, }); } break; @@ -27026,6 +27875,11 @@ class BlobQueryResponse { } // Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; (function (BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. @@ -27041,6 +27895,12 @@ class BlobQueryResponse { */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; (function (PremiumPageBlobTier) { /** * P4 Tier. @@ -27101,6 +27961,20 @@ function ensureCpkIfSpecified(cpk, isHttps) { cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
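The hunk above shows the updated @azure/storage-blob bundle exporting the BlockBlobTier and PremiumPageBlobTier enums as runtime values and adding a StorageBlobAudience enum carrying the OAuth scopes used when requesting an AAD token for Storage or Disk. A minimal sketch of reading those values follows; it assumes the standalone @azure/storage-blob package rather than the generated dist bundle shown in this diff.

```js
// Illustrative only: assumes the standalone @azure/storage-blob package,
// not the generated dist/cache-save/index.js bundle this patch updates.
const {
  StorageBlobAudience,
  BlockBlobTier,
  PremiumPageBlobTier,
} = require("@azure/storage-blob");

console.log(StorageBlobAudience.StorageOAuthScopes);     // "https://storage.azure.com/.default"
console.log(StorageBlobAudience.DiskComputeOAuthScopes); // "https://disk.compute.azure.com/.default"
console.log(BlockBlobTier.Archive);                      // "Archive"
console.log(PremiumPageBlobTier.P4);                     // "P4"
```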
@@ -27113,16 +27987,16 @@ function ensureCpkIfSpecified(cpk, isHttps) { function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, - clearRange + clearRange, } }) }); } @@ -27135,7 +28009,7 @@ function rangeResponseFromModel(response) { */ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; @@ -27171,7 +28045,7 @@ const cancel = async function cancel(options = {}) { } // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); @@ -27249,7 +28123,7 @@ function makeBlobBeginCopyFromURLPollOperation(state) { state: Object.assign({}, state), cancel, toString, - update + update, }; } @@ -27891,7 +28765,7 @@ async function streamToBuffer2(stream, buffer, encoding) { */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); + const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -27907,8 +28781,8 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). 
*/ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, @@ -27951,12 +28825,17 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -27973,10 +28852,8 @@ class BlobClient extends StorageClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); this.blobContext = new Blob$1(this.storageClientContext); this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); @@ -28101,11 +28978,13 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-download", options); try { const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately - if (false) {} + if (!coreHttp.isNode) { + return wrappedRes; + } // We support retrying when download stream unexpected ends in Node.js runtime // Following code shouldn't be bundled into browser build, however some // bundlers may try to bundle following code and "FileReadResponse.ts". 
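The BlobClient constructor change above makes two behaviors explicit: building a client from an account connection string now throws outside Node.js, and a caller-supplied proxyOptions is no longer overwritten by the proxy settings derived from the connection string. A minimal sketch under those assumptions (account name, key, container, and blob names are hypothetical):

```js
// Minimal sketch: the connection string and proxy endpoint below are placeholders.
const { BlobClient } = require("@azure/storage-blob");

const connectionString =
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<base64-key>;EndpointSuffix=core.windows.net";

const blobClient = new BlobClient(connectionString, "my-container", "my-blob.txt", {
  // With this update the explicit proxyOptions is respected instead of being
  // replaced by getDefaultProxySettings() derived from the connection string.
  proxyOptions: { host: "http://127.0.0.1", port: 3128 },
});
```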
@@ -28130,16 +29009,16 @@ class BlobClient extends StorageClient { ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, }, range: rangeToString({ count: offset + res.contentLength - start, - offset: start + offset: start, }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey + cpkInfo: options.customerProvidedKey, }; // Debug purpose only // console.log( @@ -28150,13 +29029,13 @@ class BlobClient extends StorageClient { return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress + onProgress: options.onProgress, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28181,21 +29060,23 @@ class BlobClient extends StorageClient { abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); + // Expected exception when checking blob existence return false; } + else if (e.statusCode === 409 && + e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg) { + // Expected exception when checking blob existence + return true; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28227,7 +29108,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28254,7 +29135,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28276,20 +29157,19 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." + message: "Expected exception when deleting a blob or snapshot only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28313,7 +29193,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28347,7 +29227,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28377,7 +29257,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28403,7 +29283,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28427,7 +29307,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28461,7 +29341,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28545,7 +29425,7 @@ class BlobClient extends StorageClient { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) + startCopyFromURL: (...args) => this.startCopyFromURL(...args), }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, @@ -28553,7 +29433,7 @@ class BlobClient extends StorageClient { intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + startCopyFromURLOptions: options, }); // Trigger the startCopyFromURL call by calling poll. // Any errors from this method should be surfaced to the user. @@ -28576,7 +29456,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28602,13 +29482,13 @@ class BlobClient extends StorageClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold }, convertTracingToRequestOptionsBase(updatedOptions))); + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28636,7 +29516,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28713,7 +29593,7 @@ class BlobClient extends StorageClient { conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -28732,7 +29612,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28770,7 +29650,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28851,13 +29731,13 @@ class BlobClient extends StorageClient { sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions + sourceIfTags: options.sourceConditions.tagConditions, }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28898,7 +29778,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28919,7 +29799,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28940,7 +29820,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -28991,12 +29871,17 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -29051,7 +29936,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29072,20 +29957,19 @@ class AppendBlobClient extends BlobClient { const conditions = { ifNoneMatch: ETagAny }; try { const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29108,7 +29992,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29147,13 +30031,13 @@ class AppendBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29186,13 +30070,13 @@ class AppendBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29243,12 +30127,17 @@ class BlockBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -29314,23 +30203,25 @@ class BlockBlobClient extends BlobClient { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); try { - if (false) {} + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: 
toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + outputSerialization: toQuerySerialization(options.outputTextConfiguration), }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, - onError: options.onError + onError: options.onError, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29372,13 +30263,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29415,13 +30306,13 @@ class BlockBlobClient extends BlobClient { sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29445,13 +30336,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29489,7 +30380,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29520,7 +30411,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29554,7 +30445,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29581,7 +30472,7 @@ class BlockBlobClient extends BlobClient { async uploadData(data, options = {}) { const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); try { - if (true) { + if (coreHttp.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; @@ -29595,12 +30486,15 @@ class BlockBlobClient extends BlobClient { } return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } - else {} + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29636,7 +30530,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29649,7 +30543,7 @@ class BlockBlobClient extends BlobClient { * Uploads data to block blob. Requires a bodyFactory as the data source, * which need to return a {@link HttpRequestBody} object with the offset and size provided. * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. 
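The uploadData change in the surrounding hunks adds a browser path that wraps the input in a Blob, while Node.js continues to buffer Buffer/ArrayBuffer input; as the JSDoc above notes, payloads no larger than maxSingleShotSize go through a single upload() call and larger ones through stageBlock()/commitBlockList(). A minimal usage sketch, assuming a hypothetical SAS URL and arbitrary sizes:

```js
// Minimal sketch: the SAS URL, buffer size, and option values are illustrative.
const { BlockBlobClient } = require("@azure/storage-blob");

async function uploadExample() {
  const client = new BlockBlobClient(
    "https://myaccount.blob.core.windows.net/my-container/my-blob?<sas>"
  );
  const data = Buffer.alloc(12 * 1024 * 1024); // 12 MiB of zeroes, for illustration
  await client.uploadData(data, {
    maxSingleShotSize: 8 * 1024 * 1024, // anything larger is split into blocks
    blockSize: 4 * 1024 * 1024,
    onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
  });
}
```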
@@ -29715,14 +30609,14 @@ class BlockBlobClient extends BlobClient { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying // TODO: Hook with convenience layer progress event in finer level transferProgress += contentLength; if (options.onProgress) { options.onProgress({ - loadedBytes: transferProgress + loadedBytes: transferProgress, }); } }); @@ -29733,7 +30627,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29762,14 +30656,14 @@ class BlockBlobClient extends BlobClient { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, - start: offset + start: offset, }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29813,7 +30707,7 @@ class BlockBlobClient extends BlobClient { await this.stageBlock(blockID, body, length, { conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying transferProgress += length; @@ -29832,7 +30726,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29883,12 +30777,17 @@ class PageBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -29938,7 +30837,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29961,20 +30860,19 @@ class PageBlobClient extends BlobClient { try { const conditions = { ifNoneMatch: ETagAny }; const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) 
=== null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -29999,13 +30897,13 @@ class PageBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30035,13 +30933,13 @@ class PageBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30068,7 +30966,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30097,7 +30995,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30127,7 +31025,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30157,7 +31055,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30183,7 +31081,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30210,7 +31108,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30240,7 +31138,7 @@ class 
PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30388,7 +31286,7 @@ class BatchResponseParser { return { subResponses: deserializedSubResponses, subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount + subResponsesFailedCount: subResponsesFailedCount, }; } } @@ -30535,7 +31433,7 @@ class BlobBatch { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); @@ -30543,7 +31441,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30582,7 +31480,7 @@ class BlobBatch { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); @@ -30590,7 +31488,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30647,7 +31545,7 @@ class InnerBatchRequest { this.subRequestPrefix, `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); for (const header of request.headers.headersArray()) { this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; @@ -30687,7 +31585,7 @@ class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, - headers: new coreHttp.HttpHeaders() + headers: new coreHttp.HttpHeaders(), }; this.batchRequest = batchRequest; } @@ -30854,14 +31752,14 @@ class BlobBatchClient { version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount + subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30907,12 +31805,17 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if 
(extractedCreds.kind === "SASConnString") { url = @@ -30964,7 +31867,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30984,20 +31887,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." + message: "Expected exception when creating a container only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31019,7 +31921,7 @@ class ContainerClient extends StorageClient { try { await this.getProperties({ abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } @@ -31027,13 +31929,13 @@ class ContainerClient extends StorageClient { if (e.statusCode === 404) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" + message: "Expected exception when checking container existence", }); return false; } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31107,7 +32009,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31133,7 +32035,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31153,20 +32055,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." + message: "Expected exception when deleting a container only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31200,7 +32101,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31236,13 +32137,13 @@ class ContainerClient extends StorageClient { requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], - version: response.version + version: response.version, }; for (const identifier of response) { let accessPolicy = undefined; if (identifier.accessPolicy) { accessPolicy = { - permissions: identifier.accessPolicy.permissions + permissions: identifier.accessPolicy.permissions, }; if (identifier.accessPolicy.expiresOn) { accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); @@ -31253,7 +32154,7 @@ class ContainerClient extends StorageClient { } res.signedIdentifiers.push({ accessPolicy, - id: identifier.id + id: identifier.id, }); } return res; @@ -31261,7 +32162,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31300,9 +32201,9 @@ class ContainerClient extends StorageClient { permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" + : "", }, - id: identifier.id + id: identifier.id, }); } return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); @@ -31310,7 +32211,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31356,13 +32257,13 @@ class ContainerClient extends StorageClient { const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, - response + response, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31393,7 +32294,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31415,8 +32316,12 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const 
wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; @@ -31424,7 +32329,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31444,19 +32349,33 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; }) }) }); return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31638,7 +32557,7 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } + }, }; } /** @@ -31712,7 +32631,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` @@ -31727,7 +32646,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } @@ -31745,7 +32664,7 @@ class ContainerClient extends StorageClient { * } * } * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -31756,7 +32675,9 @@ class ContainerClient extends StorageClient { * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; - * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) { + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * @@ -31767,7 +32688,7 @@ class ContainerClient extends StorageClient { * } * * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -31834,7 +32755,208 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
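+     *
+     * Example (illustrative sketch only; `containerClient` is assumed to be an
+     * existing ContainerClient, and this helper is internal to the client):
+     *
+     * ```js
+     * for await (const blob of containerClient.findBlobsByTagsItems("tagkey='tagvalue'")) {
+     *   console.log(`${blob.name}: ${blob.tagValue}`);
+     * }
+     * ```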
+ */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, }; } getContainerNameFromUrl() { @@ -31967,6 +33089,10 @@ class AccountSASPermissions { * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. @@ -32013,6 +33139,9 @@ class AccountSASPermissions { case "i": accountSASPermissions.setImmutabilityPolicy = true; break; + case "y": + accountSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission character: ${c}`); } @@ -32063,6 +33192,9 @@ class AccountSASPermissions { if (permissionLike.setImmutabilityPolicy) { accountSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } return accountSASPermissions; } /** @@ -32116,6 +33248,9 @@ class AccountSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -32298,6 +33433,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -32308,25 +33448,48 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - "" // Account SAS requires an additional newline character - ].join("\n"); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); } /** @@ -32373,12 +33536,17 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); @@ -32417,13 +33585,13 @@ class BlobServiceClient extends StorageClient { const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, - containerCreateResponse + containerCreateResponse, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ 
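// Illustrative sketch (annotation, not part of the bundled output): generating an
// account SAS with the fields handled above. "y" grants the new permanentDelete
// permission, and `encryptionScope` requires service version "2020-12-06" or later.
// `sharedKeyCredential` is assumed to be an existing StorageSharedKeyCredential.
const accountSas = generateAccountSASQueryParameters({
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
    permissions: AccountSASPermissions.parse("rwdy"),
    services: "b",
    resourceTypes: "sco",
    encryptionScope: "my-encryption-scope",
    version: "2020-12-06",
}, sharedKeyCredential).toString();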
-32447,7 +33615,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32477,7 +33645,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32507,7 +33675,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32531,7 +33699,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32556,7 +33724,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32581,7 +33749,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32607,7 +33775,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32637,7 +33805,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32680,7 +33848,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32846,7 +34014,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -32984,6 +34152,9 @@ class BlobServiceClient extends StorageClient { if (options.includeMetadata) { include.push("metadata"); } + if (options.includeSystem) { + include.push("system"); + } // AsyncIterableIterator to iterate over containers const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include } : {})); const iter = this.listItems(listSegmentOptions); @@ -33005,7 +34176,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -33024,7 +34195,7 @@ class BlobServiceClient extends StorageClient { try { const response = await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) + expiresOn: truncatedISO8061Date(expiresOn, false), }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); const userDelegationKey = { signedObjectId: response.signedObjectId, @@ -33033,7 +34204,7 @@ class BlobServiceClient extends StorageClient { signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, - value: response.value + value: response.value, }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; @@ -33041,7 +34212,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33090,39 +34261,27 @@ class BlobServiceClient extends StorageClient { Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, - get: function () { - return coreHttp.BaseRequestPolicy; - } + get: function () { return coreHttp.BaseRequestPolicy; } }); Object.defineProperty(exports, 'HttpHeaders', { enumerable: true, - get: function () { - return coreHttp.HttpHeaders; - } + get: function () { return coreHttp.HttpHeaders; } }); Object.defineProperty(exports, 'RequestPolicyOptions', { enumerable: true, - get: function () { - return coreHttp.RequestPolicyOptions; - } + get: function () { return coreHttp.RequestPolicyOptions; } }); Object.defineProperty(exports, 'RestError', { enumerable: true, - get: function () { - return coreHttp.RestError; - } + get: function () { return coreHttp.RestError; } }); Object.defineProperty(exports, 'WebResource', { enumerable: true, - get: function () { - return coreHttp.WebResource; - } + get: function () { return coreHttp.WebResource; } }); Object.defineProperty(exports, 'deserializationPolicy', { enumerable: true, - get: function () { - return coreHttp.deserializationPolicy; - } + get: function () { return coreHttp.deserializationPolicy; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -33454,7 +34613,7 @@ module.exports = require("crypto"); XMLComment = __webpack_require__(919); - XMLElement = __webpack_require__(845); + XMLElement = __webpack_require__(701); XMLRaw = __webpack_require__(660); @@ -34418,7 +35577,7 @@ Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: functi Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); __exportStar(__webpack_require__(132), exports); -__exportStar(__webpack_require__(165), exports); 
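// Illustrative sketch (annotation, not part of the bundled output): the new
// `includeSystem` option above adds "system" to the include list, so system
// containers such as $logs are returned when listing. `blobServiceClient` is
// assumed to be an existing BlobServiceClient.
for await (const container of blobServiceClient.listContainers({ includeSystem: true })) {
    console.log(container.name);
}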
+__exportStar(__webpack_require__(845), exports); var context_1 = __webpack_require__(492); /** Entrypoint for context API */ exports.context = context_1.ContextAPI.getInstance(); @@ -37368,7 +38527,7 @@ function defer(fn) Object.defineProperty(exports, '__esModule', { value: true }); -__webpack_require__(338); +__webpack_require__(97); var tslib = __webpack_require__(671); // Copyright (c) Microsoft Corporation. @@ -37391,7 +38550,7 @@ function getPagedAsyncIterator(pagedResult) { }, byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { return getPageAsyncIterator(pagedResult, settings === null || settings === void 0 ? void 0 : settings.maxPageSize); - }) + }), }; } function getItemAsyncIterator(pagedResult, maxPageSize) { @@ -37526,7 +38685,7 @@ module.exports = {"application/1d-interleaved-parityfec":{"source":"iana"},"appl * POSSIBILITY OF SUCH DAMAGE. */ -const psl = __webpack_require__(750); +const psl = __webpack_require__(632); function getPublicSuffix(domain) { return psl.get(domain); @@ -38619,7 +39778,7 @@ CombinedStream.prototype._emitError = function(err) { XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); module.exports = XMLDocument = (function(superClass) { extend(XMLDocument, superClass); @@ -40045,7 +41204,282 @@ exports.wrapSpanContext = wrapSpanContext; module.exports = require("net"); /***/ }), -/* 632 */, +/* 632 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ + + + +var Punycode = __webpack_require__(815); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = __webpack_require__(50).map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' 
+}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. 
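+  // Annotation (not part of the bundled module): expected results of the exports
+  // defined below, mirroring the standalone "psl" package this code comes from:
+  //   exports.parse('www.example.co.uk') -> { tld: 'co.uk', sld: 'example',
+  //     domain: 'example.co.uk', subdomain: 'www', listed: true, ... }
+  //   exports.get('www.example.co.uk')   -> 'example.co.uk'
+  //   exports.isValid('example.co.uk')   -> true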
+ parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. +// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + + +/***/ }), /* 633 */, /* 634 */, /* 635 */, @@ -43350,18 +44784,12 @@ exports.restoreCache = restoreCache; * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); + let cacheId = null; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); @@ -43376,9 +44804,24 @@ function saveCache(paths, key, options) { const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? 
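        // Annotation (not part of the bundled output): sketch of how the reserve
        // response just above is interpreted in @actions/cache 2.0.2 - a cacheId in
        // the result lets the upload proceed, a 400 surfaces the server-side size cap
        // message, and anything else becomes a ReserveCacheError.
        function resolveCacheId(reserveCacheResponse, key) {
            if (reserveCacheResponse?.result?.cacheId) {
                return reserveCacheResponse.result.cacheId;
            }
            if (reserveCacheResponse?.statusCode === 400) {
                throw new Error(reserveCacheResponse?.error?.message ?? 'Cache size is over the data cap limit, not saving cache.');
            }
            throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${reserveCacheResponse?.error?.message}`);
        }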
_d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } core.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, options); } @@ -43453,1680 +44896,306 @@ Object.defineProperty(exports, "__esModule", { value: true }); /* 699 */, /* 700 */, /* 701 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ +// Generated by CoffeeScript 1.12.7 +(function() { + var NodeType, XMLAttribute, XMLElement, XMLNamedNodeMap, XMLNode, getValue, isFunction, isObject, ref, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; -const punycode = __webpack_require__(815); -const urlParse = __webpack_require__(835).parse; -const util = __webpack_require__(669); -const pubsuffix = __webpack_require__(519); -const Store = __webpack_require__(627).Store; -const MemoryCookieStore = __webpack_require__(349).MemoryCookieStore; -const pathMatch = __webpack_require__(54).pathMatch; -const VERSION = __webpack_require__(459); -const { fromCallback } = __webpack_require__(147); + ref = __webpack_require__(582), isObject = ref.isObject, isFunction = ref.isFunction, getValue = ref.getValue; -// From RFC6265 S4.1.1 -// note that it excludes \x3B ";" -const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; + XMLNode = __webpack_require__(257); -const CONTROL_CHARS = /[\x00-\x1F]/; + NodeType = __webpack_require__(683); -// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in -// the "relaxed" mode, see: -// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 -const TERMINATORS = ["\n", "\r", "\0"]; + XMLAttribute = __webpack_require__(884); -// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' -// Note ';' is \x3B -const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + XMLNamedNodeMap = __webpack_require__(451); -// date-time parsing constants (RFC6265 S5.1.1) + module.exports = XMLElement = (function(superClass) { + extend(XMLElement, superClass); -const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; - -const MONTH_TO_NUM = { - jan: 0, - feb: 1, - mar: 2, - apr: 3, - may: 4, - jun: 5, - jul: 6, - aug: 7, - sep: 8, - oct: 9, - nov: 10, - dec: 11 -}; - -const MAX_TIME = 2147483647000; // 31-bit max -const MIN_TIME = 0; // 31-bit min -const SAME_SITE_CONTEXT_VAL_ERR = - 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"'; - -function checkSameSiteContext(value) { - const context = String(value).toLowerCase(); - if (context === "none" || context === "lax" || context === "strict") { - return context; - } else { - return null; - } -} - -const PrefixSecurityEnum = Object.freeze({ - SILENT: "silent", - STRICT: "strict", - DISABLED: "unsafe-disabled" -}); - -// Dumped from ip-regex@4.0.0, with the following changes: -// * all capturing groups converted to non-capturing -- "(?:)" -// * support for IPv6 Scoped Literal ("%eth1") removed -// * lowercase hexadecimal only -var IP_REGEX_LOWERCASE 
=/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/; - -/* - * Parses a Natural number (i.e., non-negative integer) with either the - * *DIGIT ( non-digit *OCTET ) - * or - * *DIGIT - * grammar (RFC6265 S5.1.1). - * - * The "trailingOK" boolean controls if the grammar accepts a - * "( non-digit *OCTET )" trailer. - */ -function parseDigits(token, minDigits, maxDigits, trailingOK) { - let count = 0; - while (count < token.length) { - const c = token.charCodeAt(count); - // "non-digit = %x00-2F / %x3A-FF" - if (c <= 0x2f || c >= 0x3a) { - break; - } - count++; - } - - // constrain to a minimum and maximum number of digits. - if (count < minDigits || count > maxDigits) { - return null; - } - - if (!trailingOK && count != token.length) { - return null; - } - - return parseInt(token.substr(0, count), 10); -} - -function parseTime(token) { - const parts = token.split(":"); - const result = [0, 0, 0]; - - /* RF6256 S5.1.1: - * time = hms-time ( non-digit *OCTET ) - * hms-time = time-field ":" time-field ":" time-field - * time-field = 1*2DIGIT - */ - - if (parts.length !== 3) { - return null; - } - - for (let i = 0; i < 3; i++) { - // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be - // followed by "( non-digit *OCTET )" so therefore the last time-field can - // have a trailer - const trailingOK = i == 2; - const num = parseDigits(parts[i], 1, 2, trailingOK); - if (num === null) { - return null; - } - result[i] = num; - } - - return result; -} - -function parseMonth(token) { - token = String(token) - .substr(0, 3) - .toLowerCase(); - const num = MONTH_TO_NUM[token]; - return num >= 0 ? num : null; -} - -/* - * RFC6265 S5.1.1 date parser (see RFC for full grammar) - */ -function parseDate(str) { - if (!str) { - return; - } - - /* RFC6265 S5.1.1: - * 2. Process each date-token sequentially in the order the date-tokens - * appear in the cookie-date - */ - const tokens = str.split(DATE_DELIM); - if (!tokens) { - return; - } - - let hour = null; - let minute = null; - let second = null; - let dayOfMonth = null; - let month = null; - let year = null; - - for (let i = 0; i < tokens.length; i++) { - const token = tokens[i].trim(); - if (!token.length) { - continue; - } - - let result; - - /* 2.1. 
If the found-time flag is not set and the token matches the time - * production, set the found-time flag and set the hour- value, - * minute-value, and second-value to the numbers denoted by the digits in - * the date-token, respectively. Skip the remaining sub-steps and continue - * to the next date-token. - */ - if (second === null) { - result = parseTime(token); - if (result) { - hour = result[0]; - minute = result[1]; - second = result[2]; - continue; + function XMLElement(parent, name, attributes) { + var child, j, len, ref1; + XMLElement.__super__.constructor.call(this, parent); + if (name == null) { + throw new Error("Missing element name. " + this.debugInfo()); } - } - - /* 2.2. If the found-day-of-month flag is not set and the date-token matches - * the day-of-month production, set the found-day-of- month flag and set - * the day-of-month-value to the number denoted by the date-token. Skip - * the remaining sub-steps and continue to the next date-token. - */ - if (dayOfMonth === null) { - // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" - result = parseDigits(token, 1, 2, true); - if (result !== null) { - dayOfMonth = result; - continue; + this.name = this.stringify.name(name); + this.type = NodeType.Element; + this.attribs = {}; + this.schemaTypeInfo = null; + if (attributes != null) { + this.attribute(attributes); } - } - - /* 2.3. If the found-month flag is not set and the date-token matches the - * month production, set the found-month flag and set the month-value to - * the month denoted by the date-token. Skip the remaining sub-steps and - * continue to the next date-token. - */ - if (month === null) { - result = parseMonth(token); - if (result !== null) { - month = result; - continue; - } - } - - /* 2.4. If the found-year flag is not set and the date-token matches the - * year production, set the found-year flag and set the year-value to the - * number denoted by the date-token. Skip the remaining sub-steps and - * continue to the next date-token. - */ - if (year === null) { - // "year = 2*4DIGIT ( non-digit *OCTET )" - result = parseDigits(token, 2, 4, true); - if (result !== null) { - year = result; - /* From S5.1.1: - * 3. If the year-value is greater than or equal to 70 and less - * than or equal to 99, increment the year-value by 1900. - * 4. If the year-value is greater than or equal to 0 and less - * than or equal to 69, increment the year-value by 2000. - */ - if (year >= 70 && year <= 99) { - year += 1900; - } else if (year >= 0 && year <= 69) { - year += 2000; - } - } - } - } - - /* RFC 6265 S5.1.1 - * "5. Abort these steps and fail to parse the cookie-date if: - * * at least one of the found-day-of-month, found-month, found- - * year, or found-time flags is not set, - * * the day-of-month-value is less than 1 or greater than 31, - * * the year-value is less than 1601, - * * the hour-value is greater than 23, - * * the minute-value is greater than 59, or - * * the second-value is greater than 59. 
- * (Note that leap seconds cannot be represented in this syntax.)" - * - * So, in order as above: - */ - if ( - dayOfMonth === null || - month === null || - year === null || - second === null || - dayOfMonth < 1 || - dayOfMonth > 31 || - year < 1601 || - hour > 23 || - minute > 59 || - second > 59 - ) { - return; - } - - return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); -} - -function formatDate(date) { - return date.toUTCString(); -} - -// S5.1.2 Canonicalized Host Names -function canonicalDomain(str) { - if (str == null) { - return null; - } - str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading . - - // convert to IDN if any non-ASCII characters - if (punycode && /[^\u0001-\u007f]/.test(str)) { - str = punycode.toASCII(str); - } - - return str.toLowerCase(); -} - -// S5.1.3 Domain Matching -function domainMatch(str, domStr, canonicalize) { - if (str == null || domStr == null) { - return null; - } - if (canonicalize !== false) { - str = canonicalDomain(str); - domStr = canonicalDomain(domStr); - } - - /* - * S5.1.3: - * "A string domain-matches a given domain string if at least one of the - * following conditions hold:" - * - * " o The domain string and the string are identical. (Note that both the - * domain string and the string will have been canonicalized to lower case at - * this point)" - */ - if (str == domStr) { - return true; - } - - /* " o All of the following [three] conditions hold:" */ - - /* "* The domain string is a suffix of the string" */ - const idx = str.indexOf(domStr); - if (idx <= 0) { - return false; // it's a non-match (-1) or prefix (0) - } - - // next, check it's a proper suffix - // e.g., "a.b.c".indexOf("b.c") === 2 - // 5 === 3+2 - if (str.length !== domStr.length + idx) { - return false; // it's not a suffix - } - - /* " * The last character of the string that is not included in the - * domain string is a %x2E (".") character." */ - if (str.substr(idx-1,1) !== '.') { - return false; // doesn't align on "." - } - - /* " * The string is a host name (i.e., not an IP address)." */ - if (IP_REGEX_LOWERCASE.test(str)) { - return false; // it's an IP address - } - - return true; -} - -// RFC6265 S5.1.4 Paths and Path-Match - -/* - * "The user agent MUST use an algorithm equivalent to the following algorithm - * to compute the default-path of a cookie:" - * - * Assumption: the path (and not query part or absolute uri) is passed in. - */ -function defaultPath(path) { - // "2. If the uri-path is empty or if the first character of the uri-path is not - // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. - if (!path || path.substr(0, 1) !== "/") { - return "/"; - } - - // "3. If the uri-path contains no more than one %x2F ("/") character, output - // %x2F ("/") and skip the remaining step." - if (path === "/") { - return path; - } - - const rightSlash = path.lastIndexOf("/"); - if (rightSlash === 0) { - return "/"; - } - - // "4. Output the characters of the uri-path from the first character up to, - // but not including, the right-most %x2F ("/")." 
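  // Annotation (not part of the patch): worked examples of the rule above -
  //   defaultPath("/foo/bar") -> "/foo",  defaultPath("/foo") -> "/",
  //   defaultPath("")         -> "/"      (per RFC6265 S5.1.4).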
- return path.slice(0, rightSlash); -} - -function trimTerminator(str) { - for (let t = 0; t < TERMINATORS.length; t++) { - const terminatorIdx = str.indexOf(TERMINATORS[t]); - if (terminatorIdx !== -1) { - str = str.substr(0, terminatorIdx); - } - } - - return str; -} - -function parseCookiePair(cookiePair, looseMode) { - cookiePair = trimTerminator(cookiePair); - - let firstEq = cookiePair.indexOf("="); - if (looseMode) { - if (firstEq === 0) { - // '=' is immediately at start - cookiePair = cookiePair.substr(1); - firstEq = cookiePair.indexOf("="); // might still need to split on '=' - } - } else { - // non-loose mode - if (firstEq <= 0) { - // no '=' or is at start - return; // needs to have non-empty "cookie-name" - } - } - - let cookieName, cookieValue; - if (firstEq <= 0) { - cookieName = ""; - cookieValue = cookiePair.trim(); - } else { - cookieName = cookiePair.substr(0, firstEq).trim(); - cookieValue = cookiePair.substr(firstEq + 1).trim(); - } - - if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { - return; - } - - const c = new Cookie(); - c.key = cookieName; - c.value = cookieValue; - return c; -} - -function parse(str, options) { - if (!options || typeof options !== "object") { - options = {}; - } - str = str.trim(); - - // We use a regex to parse the "name-value-pair" part of S5.2 - const firstSemi = str.indexOf(";"); // S5.2 step 1 - const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi); - const c = parseCookiePair(cookiePair, !!options.loose); - if (!c) { - return; - } - - if (firstSemi === -1) { - return c; - } - - // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string - // (including the %x3B (";") in question)." plus later on in the same section - // "discard the first ";" and trim". - const unparsed = str.slice(firstSemi + 1).trim(); - - // "If the unparsed-attributes string is empty, skip the rest of these - // steps." - if (unparsed.length === 0) { - return c; - } - - /* - * S5.2 says that when looping over the items "[p]rocess the attribute-name - * and attribute-value according to the requirements in the following - * subsections" for every item. Plus, for many of the individual attributes - * in S5.3 it says to use the "attribute-value of the last attribute in the - * cookie-attribute-list". Therefore, in this implementation, we overwrite - * the previous value. - */ - const cookie_avs = unparsed.split(";"); - while (cookie_avs.length) { - const av = cookie_avs.shift().trim(); - if (av.length === 0) { - // happens if ";;" appears - continue; - } - const av_sep = av.indexOf("="); - let av_key, av_value; - - if (av_sep === -1) { - av_key = av; - av_value = null; - } else { - av_key = av.substr(0, av_sep); - av_value = av.substr(av_sep + 1); - } - - av_key = av_key.trim().toLowerCase(); - - if (av_value) { - av_value = av_value.trim(); - } - - switch (av_key) { - case "expires": // S5.2.1 - if (av_value) { - const exp = parseDate(av_value); - // "If the attribute-value failed to parse as a cookie date, ignore the - // cookie-av." 
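          // Annotation (not part of the patch): e.g. parseDate("Sun, 06 Nov 1994 08:49:37 GMT")
          // resolves to new Date(Date.UTC(1994, 10, 6, 8, 49, 37)).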
- if (exp) { - // over and underflow not realistically a concern: V8's getTime() seems to - // store something larger than a 32-bit time_t (even with 32-bit node) - c.expires = exp; + if (parent.type === NodeType.Document) { + this.isRoot = true; + this.documentObject = parent; + parent.rootObject = this; + if (parent.children) { + ref1 = parent.children; + for (j = 0, len = ref1.length; j < len; j++) { + child = ref1[j]; + if (child.type === NodeType.DocType) { + child.name = this.name; + break; + } } } - break; - - case "max-age": // S5.2.2 - if (av_value) { - // "If the first character of the attribute-value is not a DIGIT or a "-" - // character ...[or]... If the remainder of attribute-value contains a - // non-DIGIT character, ignore the cookie-av." - if (/^-?[0-9]+$/.test(av_value)) { - const delta = parseInt(av_value, 10); - // "If delta-seconds is less than or equal to zero (0), let expiry-time - // be the earliest representable date and time." - c.setMaxAge(delta); - } - } - break; - - case "domain": // S5.2.3 - // "If the attribute-value is empty, the behavior is undefined. However, - // the user agent SHOULD ignore the cookie-av entirely." - if (av_value) { - // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E - // (".") character." - const domain = av_value.trim().replace(/^\./, ""); - if (domain) { - // "Convert the cookie-domain to lower case." - c.domain = domain.toLowerCase(); - } - } - break; - - case "path": // S5.2.4 - /* - * "If the attribute-value is empty or if the first character of the - * attribute-value is not %x2F ("/"): - * Let cookie-path be the default-path. - * Otherwise: - * Let cookie-path be the attribute-value." - * - * We'll represent the default-path as null since it depends on the - * context of the parsing. - */ - c.path = av_value && av_value[0] === "/" ? av_value : null; - break; - - case "secure": // S5.2.5 - /* - * "If the attribute-name case-insensitively matches the string "Secure", - * the user agent MUST append an attribute to the cookie-attribute-list - * with an attribute-name of Secure and an empty attribute-value." - */ - c.secure = true; - break; - - case "httponly": // S5.2.6 -- effectively the same as 'secure' - c.httpOnly = true; - break; - - case "samesite": // RFC6265bis-02 S5.3.7 - const enforcement = av_value ? av_value.toLowerCase() : ""; - switch (enforcement) { - case "strict": - c.sameSite = "strict"; - break; - case "lax": - c.sameSite = "lax"; - break; - default: - // RFC6265bis-02 S5.3.7 step 1: - // "If cookie-av's attribute-value is not a case-insensitive match - // for "Strict" or "Lax", ignore the "cookie-av"." - // This effectively sets it to 'none' from the prototype. - break; - } - break; - - default: - c.extensions = c.extensions || []; - c.extensions.push(av); - break; - } - } - - return c; -} - -/** - * If the cookie-name begins with a case-sensitive match for the - * string "__Secure-", abort these steps and ignore the cookie - * entirely unless the cookie's secure-only-flag is true. - * @param cookie - * @returns boolean - */ -function isSecurePrefixConditionMet(cookie) { - return !cookie.key.startsWith("__Secure-") || cookie.secure; -} - -/** - * If the cookie-name begins with a case-sensitive match for the - * string "__Host-", abort these steps and ignore the cookie - * entirely unless the cookie meets all the following criteria: - * 1. The cookie's secure-only-flag is true. - * 2. The cookie's host-only-flag is true. - * 3. 
The cookie-attribute-list contains an attribute with an - * attribute-name of "Path", and the cookie's path is "/". - * @param cookie - * @returns boolean - */ -function isHostPrefixConditionMet(cookie) { - return ( - !cookie.key.startsWith("__Host-") || - (cookie.secure && - cookie.hostOnly && - cookie.path != null && - cookie.path === "/") - ); -} - -// avoid the V8 deoptimization monster! -function jsonParse(str) { - let obj; - try { - obj = JSON.parse(str); - } catch (e) { - return e; - } - return obj; -} - -function fromJSON(str) { - if (!str) { - return null; - } - - let obj; - if (typeof str === "string") { - obj = jsonParse(str); - if (obj instanceof Error) { - return null; - } - } else { - // assume it's an Object - obj = str; - } - - const c = new Cookie(); - for (let i = 0; i < Cookie.serializableProperties.length; i++) { - const prop = Cookie.serializableProperties[i]; - if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) { - continue; // leave as prototype default - } - - if (prop === "expires" || prop === "creation" || prop === "lastAccessed") { - if (obj[prop] === null) { - c[prop] = null; - } else { - c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]); } - } else { - c[prop] = obj[prop]; - } - } - - return c; -} - -/* Section 5.4 part 2: - * "* Cookies with longer paths are listed before cookies with - * shorter paths. - * - * * Among cookies that have equal-length path fields, cookies with - * earlier creation-times are listed before cookies with later - * creation-times." - */ - -function cookieCompare(a, b) { - let cmp = 0; - - // descending for length: b CMP a - const aPathLen = a.path ? a.path.length : 0; - const bPathLen = b.path ? b.path.length : 0; - cmp = bPathLen - aPathLen; - if (cmp !== 0) { - return cmp; - } - - // ascending for time: a CMP b - const aTime = a.creation ? a.creation.getTime() : MAX_TIME; - const bTime = b.creation ? b.creation.getTime() : MAX_TIME; - cmp = aTime - bTime; - if (cmp !== 0) { - return cmp; - } - - // break ties for the same millisecond (precision of JavaScript's clock) - cmp = a.creationIndex - b.creationIndex; - - return cmp; -} - -// Gives the permutation of all possible pathMatch()es of a given path. The -// array is in longest-to-shortest order. Handy for indexing. -function permutePath(path) { - if (path === "/") { - return ["/"]; - } - const permutations = [path]; - while (path.length > 1) { - const lindex = path.lastIndexOf("/"); - if (lindex === 0) { - break; - } - path = path.substr(0, lindex); - permutations.push(path); - } - permutations.push("/"); - return permutations; -} - -function getCookieContext(url) { - if (url instanceof Object) { - return url; - } - // NOTE: decodeURI will throw on malformed URIs (see GH-32). - // Therefore, we will just skip decoding for such URIs. 
- try { - url = decodeURI(url); - } catch (err) { - // Silently swallow error - } - - return urlParse(url); -} - -const cookieDefaults = { - // the order in which the RFC has them: - key: "", - value: "", - expires: "Infinity", - maxAge: null, - domain: null, - path: null, - secure: false, - httpOnly: false, - extensions: null, - // set by the CookieJar: - hostOnly: null, - pathIsDefault: null, - creation: null, - lastAccessed: null, - sameSite: "none" -}; - -class Cookie { - constructor(options = {}) { - if (util.inspect.custom) { - this[util.inspect.custom] = this.inspect; } - Object.assign(this, cookieDefaults, options); - this.creation = this.creation || new Date(); - - // used to break creation ties in cookieCompare(): - Object.defineProperty(this, "creationIndex", { - configurable: false, - enumerable: false, // important for assert.deepEqual checks - writable: true, - value: ++Cookie.cookiesCreated + Object.defineProperty(XMLElement.prototype, 'tagName', { + get: function() { + return this.name; + } }); - } - inspect() { - const now = Date.now(); - const hostOnly = this.hostOnly != null ? this.hostOnly : "?"; - const createAge = this.creation - ? `${now - this.creation.getTime()}ms` - : "?"; - const accessAge = this.lastAccessed - ? `${now - this.lastAccessed.getTime()}ms` - : "?"; - return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`; - } - - toJSON() { - const obj = {}; - - for (const prop of Cookie.serializableProperties) { - if (this[prop] === cookieDefaults[prop]) { - continue; // leave as prototype default + Object.defineProperty(XMLElement.prototype, 'namespaceURI', { + get: function() { + return ''; } + }); - if ( - prop === "expires" || - prop === "creation" || - prop === "lastAccessed" - ) { - if (this[prop] === null) { - obj[prop] = null; - } else { - obj[prop] = - this[prop] == "Infinity" // intentionally not === - ? "Infinity" - : this[prop].toISOString(); - } - } else if (prop === "maxAge") { - if (this[prop] !== null) { - // again, intentionally not === - obj[prop] = - this[prop] == Infinity || this[prop] == -Infinity - ? this[prop].toString() - : this[prop]; - } - } else { - if (this[prop] !== cookieDefaults[prop]) { - obj[prop] = this[prop]; + Object.defineProperty(XMLElement.prototype, 'prefix', { + get: function() { + return ''; + } + }); + + Object.defineProperty(XMLElement.prototype, 'localName', { + get: function() { + return this.name; + } + }); + + Object.defineProperty(XMLElement.prototype, 'id', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'className', { + get: function() { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'classList', { + get: function() { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + } + }); + + Object.defineProperty(XMLElement.prototype, 'attributes', { + get: function() { + if (!this.attributeMap || !this.attributeMap.nodes) { + this.attributeMap = new XMLNamedNodeMap(this.attribs); } + return this.attributeMap; } - } + }); - return obj; - } - - clone() { - return fromJSON(this.toJSON()); - } - - validate() { - if (!COOKIE_OCTETS.test(this.value)) { - return false; - } - if ( - this.expires != Infinity && - !(this.expires instanceof Date) && - !parseDate(this.expires) - ) { - return false; - } - if (this.maxAge != null && this.maxAge <= 0) { - return false; // "Max-Age=" non-zero-digit *DIGIT - } - if (this.path != null && !PATH_VALUE.test(this.path)) { - return false; - } - - const cdomain = this.cdomain(); - if (cdomain) { - if (cdomain.match(/\.$/)) { - return false; // S4.1.2.3 suggests that this is bad. domainMatch() tests confirm this + XMLElement.prototype.clone = function() { + var att, attName, clonedSelf, ref1; + clonedSelf = Object.create(this); + if (clonedSelf.isRoot) { + clonedSelf.documentObject = null; } - const suffix = pubsuffix.getPublicSuffix(cdomain); - if (suffix == null) { - // it's a public suffix - return false; + clonedSelf.attribs = {}; + ref1 = this.attribs; + for (attName in ref1) { + if (!hasProp.call(ref1, attName)) continue; + att = ref1[attName]; + clonedSelf.attribs[attName] = att.clone(); } - } - return true; - } - - setExpires(exp) { - if (exp instanceof Date) { - this.expires = exp; - } else { - this.expires = parseDate(exp) || "Infinity"; - } - } - - setMaxAge(age) { - if (age === Infinity || age === -Infinity) { - this.maxAge = age.toString(); // so JSON.stringify() works - } else { - this.maxAge = age; - } - } - - cookieString() { - let val = this.value; - if (val == null) { - val = ""; - } - if (this.key === "") { - return val; - } - return `${this.key}=${val}`; - } - - // gives Set-Cookie header format - toString() { - let str = this.cookieString(); - - if (this.expires != Infinity) { - if (this.expires instanceof Date) { - str += `; Expires=${formatDate(this.expires)}`; - } else { - str += `; Expires=${this.expires}`; - } - } - - if (this.maxAge != null && this.maxAge != Infinity) { - str += `; Max-Age=${this.maxAge}`; - } - - if (this.domain && !this.hostOnly) { - str += `; Domain=${this.domain}`; - } - if (this.path) { - str += `; Path=${this.path}`; - } - - if (this.secure) { - str += "; Secure"; - } - if (this.httpOnly) { - str += "; HttpOnly"; - } - if (this.sameSite && this.sameSite !== "none") { - const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()]; - str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`; - } - if (this.extensions) { - this.extensions.forEach(ext => { - str += `; ${ext}`; + clonedSelf.children = []; + this.children.forEach(function(child) { + var clonedChild; + clonedChild = child.clone(); + clonedChild.parent = clonedSelf; + return clonedSelf.children.push(clonedChild); }); - } + return clonedSelf; + }; - return str; - } - - // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() - // elsewhere) - // S5.3 says to give the "latest representable date" for which we use Infinity - // For "expired" we use 0 - TTL(now) { - /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires - * attribute, the Max-Age attribute has precedence and controls the - * expiration date of the cookie. - * (Concurs with S5.3 step 3) - */ - if (this.maxAge != null) { - return this.maxAge <= 0 ? 
0 : this.maxAge * 1000; - } - - let expires = this.expires; - if (expires != Infinity) { - if (!(expires instanceof Date)) { - expires = parseDate(expires) || Infinity; + XMLElement.prototype.attribute = function(name, value) { + var attName, attValue; + if (name != null) { + name = getValue(name); } - - if (expires == Infinity) { - return Infinity; - } - - return expires.getTime() - (now || Date.now()); - } - - return Infinity; - } - - // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() - // elsewhere) - expiryTime(now) { - if (this.maxAge != null) { - const relativeTo = now || this.creation || new Date(); - const age = this.maxAge <= 0 ? -Infinity : this.maxAge * 1000; - return relativeTo.getTime() + age; - } - - if (this.expires == Infinity) { - return Infinity; - } - return this.expires.getTime(); - } - - // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() - // elsewhere), except it returns a Date - expiryDate(now) { - const millisec = this.expiryTime(now); - if (millisec == Infinity) { - return new Date(MAX_TIME); - } else if (millisec == -Infinity) { - return new Date(MIN_TIME); - } else { - return new Date(millisec); - } - } - - // This replaces the "persistent-flag" parts of S5.3 step 3 - isPersistent() { - return this.maxAge != null || this.expires != Infinity; - } - - // Mostly S5.1.2 and S5.2.3: - canonicalizedDomain() { - if (this.domain == null) { - return null; - } - return canonicalDomain(this.domain); - } - - cdomain() { - return this.canonicalizedDomain(); - } -} - -Cookie.cookiesCreated = 0; -Cookie.parse = parse; -Cookie.fromJSON = fromJSON; -Cookie.serializableProperties = Object.keys(cookieDefaults); -Cookie.sameSiteLevel = { - strict: 3, - lax: 2, - none: 1 -}; - -Cookie.sameSiteCanonical = { - strict: "Strict", - lax: "Lax" -}; - -function getNormalizedPrefixSecurity(prefixSecurity) { - if (prefixSecurity != null) { - const normalizedPrefixSecurity = prefixSecurity.toLowerCase(); - /* The three supported options */ - switch (normalizedPrefixSecurity) { - case PrefixSecurityEnum.STRICT: - case PrefixSecurityEnum.SILENT: - case PrefixSecurityEnum.DISABLED: - return normalizedPrefixSecurity; - } - } - /* Default is SILENT */ - return PrefixSecurityEnum.SILENT; -} - -class CookieJar { - constructor(store, options = { rejectPublicSuffixes: true }) { - if (typeof options === "boolean") { - options = { rejectPublicSuffixes: options }; - } - this.rejectPublicSuffixes = options.rejectPublicSuffixes; - this.enableLooseMode = !!options.looseMode; - this.allowSpecialUseDomain = !!options.allowSpecialUseDomain; - this.store = store || new MemoryCookieStore(); - this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity); - this._cloneSync = syncWrap("clone"); - this._importCookiesSync = syncWrap("_importCookies"); - this.getCookiesSync = syncWrap("getCookies"); - this.getCookieStringSync = syncWrap("getCookieString"); - this.getSetCookieStringsSync = syncWrap("getSetCookieStrings"); - this.removeAllCookiesSync = syncWrap("removeAllCookies"); - this.setCookieSync = syncWrap("setCookie"); - this.serializeSync = syncWrap("serialize"); - } - - setCookie(cookie, url, options, cb) { - let err; - const context = getCookieContext(url); - if (typeof options === "function") { - cb = options; - options = {}; - } - - const host = canonicalDomain(context.hostname); - const loose = options.loose || this.enableLooseMode; - - let sameSiteContext = null; - if (options.sameSiteContext) { - sameSiteContext = 
checkSameSiteContext(options.sameSiteContext); - if (!sameSiteContext) { - return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); - } - } - - // S5.3 step 1 - if (typeof cookie === "string" || cookie instanceof String) { - cookie = Cookie.parse(cookie, { loose: loose }); - if (!cookie) { - err = new Error("Cookie failed to parse"); - return cb(options.ignoreError ? null : err); - } - } else if (!(cookie instanceof Cookie)) { - // If you're seeing this error, and are passing in a Cookie object, - // it *might* be a Cookie object from another loaded version of tough-cookie. - err = new Error( - "First argument to setCookie must be a Cookie object or string" - ); - return cb(options.ignoreError ? null : err); - } - - // S5.3 step 2 - const now = options.now || new Date(); // will assign later to save effort in the face of errors - - // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() - - // S5.3 step 4: NOOP; domain is null by default - - // S5.3 step 5: public suffixes - if (this.rejectPublicSuffixes && cookie.domain) { - const suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); - if (suffix == null) { - // e.g. "com" - err = new Error("Cookie has domain set to a public suffix"); - return cb(options.ignoreError ? null : err); - } - } - - // S5.3 step 6: - if (cookie.domain) { - if (!domainMatch(host, cookie.cdomain(), false)) { - err = new Error( - `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}` - ); - return cb(options.ignoreError ? null : err); - } - - if (cookie.hostOnly == null) { - // don't reset if already set - cookie.hostOnly = false; - } - } else { - cookie.hostOnly = true; - cookie.domain = host; - } - - //S5.2.4 If the attribute-value is empty or if the first character of the - //attribute-value is not %x2F ("/"): - //Let cookie-path be the default-path. - if (!cookie.path || cookie.path[0] !== "/") { - cookie.path = defaultPath(context.pathname); - cookie.pathIsDefault = true; - } - - // S5.3 step 8: NOOP; secure attribute - // S5.3 step 9: NOOP; httpOnly attribute - - // S5.3 step 10 - if (options.http === false && cookie.httpOnly) { - err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); - return cb(options.ignoreError ? null : err); - } - - // 6252bis-02 S5.4 Step 13 & 14: - if (cookie.sameSite !== "none" && sameSiteContext) { - // "If the cookie's "same-site-flag" is not "None", and the cookie - // is being set from a context whose "site for cookies" is not an - // exact match for request-uri's host's registered domain, then - // abort these steps and ignore the newly created cookie entirely." - if (sameSiteContext === "none") { - err = new Error( - "Cookie is SameSite but this is a cross-origin request" - ); - return cb(options.ignoreError ? 
null : err); - } - } - - /* 6265bis-02 S5.4 Steps 15 & 16 */ - const ignoreErrorForPrefixSecurity = - this.prefixSecurity === PrefixSecurityEnum.SILENT; - const prefixSecurityDisabled = - this.prefixSecurity === PrefixSecurityEnum.DISABLED; - /* If prefix checking is not disabled ...*/ - if (!prefixSecurityDisabled) { - let errorFound = false; - let errorMsg; - /* Check secure prefix condition */ - if (!isSecurePrefixConditionMet(cookie)) { - errorFound = true; - errorMsg = "Cookie has __Secure prefix but Secure attribute is not set"; - } else if (!isHostPrefixConditionMet(cookie)) { - /* Check host prefix condition */ - errorFound = true; - errorMsg = - "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'"; - } - if (errorFound) { - return cb( - options.ignoreError || ignoreErrorForPrefixSecurity - ? null - : new Error(errorMsg) - ); - } - } - - const store = this.store; - - if (!store.updateCookie) { - store.updateCookie = function(oldCookie, newCookie, cb) { - this.putCookie(newCookie, cb); - }; - } - - function withCookie(err, oldCookie) { - if (err) { - return cb(err); - } - - const next = function(err) { - if (err) { - return cb(err); - } else { - cb(null, cookie); - } - }; - - if (oldCookie) { - // S5.3 step 11 - "If the cookie store contains a cookie with the same name, - // domain, and path as the newly created cookie:" - if (options.http === false && oldCookie.httpOnly) { - // step 11.2 - err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); - return cb(options.ignoreError ? null : err); - } - cookie.creation = oldCookie.creation; // step 11.3 - cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker - cookie.lastAccessed = now; - // Step 11.4 (delete cookie) is implied by just setting the new one: - store.updateCookie(oldCookie, cookie, next); // step 12 - } else { - cookie.creation = cookie.lastAccessed = now; - store.putCookie(cookie, next); // step 12 - } - } - - store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); - } - - // RFC6365 S5.4 - getCookies(url, options, cb) { - const context = getCookieContext(url); - if (typeof options === "function") { - cb = options; - options = {}; - } - - const host = canonicalDomain(context.hostname); - const path = context.pathname || "/"; - - let secure = options.secure; - if ( - secure == null && - context.protocol && - (context.protocol == "https:" || context.protocol == "wss:") - ) { - secure = true; - } - - let sameSiteLevel = 0; - if (options.sameSiteContext) { - const sameSiteContext = checkSameSiteContext(options.sameSiteContext); - sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext]; - if (!sameSiteLevel) { - return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); - } - } - - let http = options.http; - if (http == null) { - http = true; - } - - const now = options.now || Date.now(); - const expireCheck = options.expire !== false; - const allPaths = !!options.allPaths; - const store = this.store; - - function matchingCookie(c) { - // "Either: - // The cookie's host-only-flag is true and the canonicalized - // request-host is identical to the cookie's domain. - // Or: - // The cookie's host-only-flag is false and the canonicalized - // request-host domain-matches the cookie's domain." 
- if (c.hostOnly) { - if (c.domain != host) { - return false; + if (isObject(name)) { + for (attName in name) { + if (!hasProp.call(name, attName)) continue; + attValue = name[attName]; + this.attribute(attName, attValue); } } else { - if (!domainMatch(host, c.domain, false)) { + if (isFunction(value)) { + value = value.apply(); + } + if (this.options.keepNullAttributes && (value == null)) { + this.attribs[name] = new XMLAttribute(this, name, ""); + } else if (value != null) { + this.attribs[name] = new XMLAttribute(this, name, value); + } + } + return this; + }; + + XMLElement.prototype.removeAttribute = function(name) { + var attName, j, len; + if (name == null) { + throw new Error("Missing attribute name. " + this.debugInfo()); + } + name = getValue(name); + if (Array.isArray(name)) { + for (j = 0, len = name.length; j < len; j++) { + attName = name[j]; + delete this.attribs[attName]; + } + } else { + delete this.attribs[name]; + } + return this; + }; + + XMLElement.prototype.toString = function(options) { + return this.options.writer.element(this, this.options.writer.filterOptions(options)); + }; + + XMLElement.prototype.att = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.a = function(name, value) { + return this.attribute(name, value); + }; + + XMLElement.prototype.getAttribute = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].value; + } else { + return null; + } + }; + + XMLElement.prototype.setAttribute = function(name, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNode = function(name) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name]; + } else { + return null; + } + }; + + XMLElement.prototype.setAttributeNode = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNode = function(oldAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(name) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNS = function(namespaceURI, qualifiedName, value) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.removeAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getAttributeNodeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setAttributeNodeNS = function(newAttr) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.hasAttribute = function(name) { + return this.attribs.hasOwnProperty(name); + }; + + XMLElement.prototype.hasAttributeNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); + }; + + XMLElement.prototype.setIdAttribute = function(name, isId) { + if (this.attribs.hasOwnProperty(name)) { + return this.attribs[name].isId; + } else { + return isId; + } + }; + + XMLElement.prototype.setIdAttributeNS = function(namespaceURI, localName, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.setIdAttributeNode = function(idAttr, isId) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagName = function(tagname) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.getElementsByClassName = function(classNames) { + throw new Error("This DOM method is not implemented." + this.debugInfo()); + }; + + XMLElement.prototype.isEqualNode = function(node) { + var i, j, ref1; + if (!XMLElement.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { + return false; + } + if (node.namespaceURI !== this.namespaceURI) { + return false; + } + if (node.prefix !== this.prefix) { + return false; + } + if (node.localName !== this.localName) { + return false; + } + if (node.attribs.length !== this.attribs.length) { + return false; + } + for (i = j = 0, ref1 = this.attribs.length - 1; 0 <= ref1 ? j <= ref1 : j >= ref1; i = 0 <= ref1 ? ++j : --j) { + if (!this.attribs[i].isEqualNode(node.attribs[i])) { return false; } } - - // "The request-uri's path path-matches the cookie's path." - if (!allPaths && !pathMatch(path, c.path)) { - return false; - } - - // "If the cookie's secure-only-flag is true, then the request-uri's - // scheme must denote a "secure" protocol" - if (c.secure && !secure) { - return false; - } - - // "If the cookie's http-only-flag is true, then exclude the cookie if the - // cookie-string is being generated for a "non-HTTP" API" - if (c.httpOnly && !http) { - return false; - } - - // RFC6265bis-02 S5.3.7 - if (sameSiteLevel) { - const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"]; - if (cookieLevel > sameSiteLevel) { - // only allow cookies at or below the request level - return false; - } - } - - // deferred from S5.3 - // non-RFC: allow retention of expired cookies by choice - if (expireCheck && c.expiryTime() <= now) { - store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored - return false; - } - return true; - } - - store.findCookies( - host, - allPaths ? 
null : path, - this.allowSpecialUseDomain, - (err, cookies) => { - if (err) { - return cb(err); - } - - cookies = cookies.filter(matchingCookie); - - // sorting of S5.4 part 2 - if (options.sort !== false) { - cookies = cookies.sort(cookieCompare); - } - - // S5.4 part 3 - const now = new Date(); - for (const cookie of cookies) { - cookie.lastAccessed = now; - } - // TODO persist lastAccessed - - cb(null, cookies); - } - ); - } - - getCookieString(...args) { - const cb = args.pop(); - const next = function(err, cookies) { - if (err) { - cb(err); - } else { - cb( - null, - cookies - .sort(cookieCompare) - .map(c => c.cookieString()) - .join("; ") - ); - } - }; - args.push(next); - this.getCookies.apply(this, args); - } - - getSetCookieStrings(...args) { - const cb = args.pop(); - const next = function(err, cookies) { - if (err) { - cb(err); - } else { - cb( - null, - cookies.map(c => { - return c.toString(); - }) - ); - } - }; - args.push(next); - this.getCookies.apply(this, args); - } - - serialize(cb) { - let type = this.store.constructor.name; - if (type === "Object") { - type = null; - } - - // update README.md "Serialization Format" if you change this, please! - const serialized = { - // The version of tough-cookie that serialized this jar. Generally a good - // practice since future versions can make data import decisions based on - // known past behavior. When/if this matters, use `semver`. - version: `tough-cookie@${VERSION}`, - - // add the store type, to make humans happy: - storeType: type, - - // CookieJar configuration: - rejectPublicSuffixes: !!this.rejectPublicSuffixes, - - // this gets filled from getAllCookies: - cookies: [] }; - if ( - !( - this.store.getAllCookies && - typeof this.store.getAllCookies === "function" - ) - ) { - return cb( - new Error( - "store does not support getAllCookies and cannot be serialized" - ) - ); - } + return XMLElement; - this.store.getAllCookies((err, cookies) => { - if (err) { - return cb(err); - } + })(XMLNode); - serialized.cookies = cookies.map(cookie => { - // convert to serialized 'raw' cookies - cookie = cookie instanceof Cookie ? cookie.toJSON() : cookie; - - // Remove the index so new ones get assigned during deserialization - delete cookie.creationIndex; - - return cookie; - }); - - return cb(null, serialized); - }); - } - - toJSON() { - return this.serializeSync(); - } - - // use the class method CookieJar.deserialize instead of calling this directly - _importCookies(serialized, cb) { - let cookies = serialized.cookies; - if (!cookies || !Array.isArray(cookies)) { - return cb(new Error("serialized jar has no cookies array")); - } - cookies = cookies.slice(); // do not modify the original - - const putNext = err => { - if (err) { - return cb(err); - } - - if (!cookies.length) { - return cb(err, this); - } - - let cookie; - try { - cookie = fromJSON(cookies.shift()); - } catch (e) { - return cb(e); - } - - if (cookie === null) { - return putNext(null); // skip this cookie - } - - this.store.putCookie(cookie, putNext); - }; - - putNext(); - } - - clone(newStore, cb) { - if (arguments.length === 1) { - cb = newStore; - newStore = null; - } - - this.serialize((err, serialized) => { - if (err) { - return cb(err); - } - CookieJar.deserialize(serialized, newStore, cb); - }); - } - - cloneSync(newStore) { - if (arguments.length === 0) { - return this._cloneSync(); - } - if (!newStore.synchronous) { - throw new Error( - "CookieJar clone destination store is not synchronous; use async API instead." 
- ); - } - return this._cloneSync(newStore); - } - - removeAllCookies(cb) { - const store = this.store; - - // Check that the store implements its own removeAllCookies(). The default - // implementation in Store will immediately call the callback with a "not - // implemented" Error. - if ( - typeof store.removeAllCookies === "function" && - store.removeAllCookies !== Store.prototype.removeAllCookies - ) { - return store.removeAllCookies(cb); - } - - store.getAllCookies((err, cookies) => { - if (err) { - return cb(err); - } - - if (cookies.length === 0) { - return cb(null); - } - - let completedCount = 0; - const removeErrors = []; - - function removeCookieCb(removeErr) { - if (removeErr) { - removeErrors.push(removeErr); - } - - completedCount++; - - if (completedCount === cookies.length) { - return cb(removeErrors.length ? removeErrors[0] : null); - } - } - - cookies.forEach(cookie => { - store.removeCookie( - cookie.domain, - cookie.path, - cookie.key, - removeCookieCb - ); - }); - }); - } - - static deserialize(strOrObj, store, cb) { - if (arguments.length !== 3) { - // store is optional - cb = store; - store = null; - } - - let serialized; - if (typeof strOrObj === "string") { - serialized = jsonParse(strOrObj); - if (serialized instanceof Error) { - return cb(serialized); - } - } else { - serialized = strOrObj; - } - - const jar = new CookieJar(store, serialized.rejectPublicSuffixes); - jar._importCookies(serialized, err => { - if (err) { - return cb(err); - } - cb(null, jar); - }); - } - - static deserializeSync(strOrObj, store) { - const serialized = - typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj; - const jar = new CookieJar(store, serialized.rejectPublicSuffixes); - - // catch this mistake early: - if (!jar.store.synchronous) { - throw new Error( - "CookieJar store is not synchronous; use async API instead." - ); - } - - jar._importCookiesSync(serialized); - return jar; - } -} -CookieJar.fromJSON = CookieJar.deserializeSync; - -[ - "_importCookies", - "clone", - "getCookies", - "getCookieString", - "getSetCookieStrings", - "removeAllCookies", - "serialize", - "setCookie" -].forEach(name => { - CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]); -}); -CookieJar.deserialize = fromCallback(CookieJar.deserialize); - -// Use a closure to provide a true imperative API for synchronous stores. -function syncWrap(method) { - return function(...args) { - if (!this.store.synchronous) { - throw new Error( - "CookieJar store is not synchronous; use async API instead." 
- ); - } - - let syncErr, syncResult; - this[method](...args, (err, result) => { - syncErr = err; - syncResult = result; - }); - - if (syncErr) { - throw syncErr; - } - return syncResult; - }; -} - -exports.version = VERSION; -exports.CookieJar = CookieJar; -exports.Cookie = Cookie; -exports.Store = Store; -exports.MemoryCookieStore = MemoryCookieStore; -exports.parseDate = parseDate; -exports.formatDate = formatDate; -exports.parse = parse; -exports.fromJSON = fromJSON; -exports.domainMatch = domainMatch; -exports.defaultPath = defaultPath; -exports.pathMatch = pathMatch; -exports.getPublicSuffix = pubsuffix.getPublicSuffix; -exports.cookieCompare = cookieCompare; -exports.permuteDomain = __webpack_require__(383).permuteDomain; -exports.permutePath = permutePath; -exports.canonicalDomain = canonicalDomain; -exports.PrefixSecurityEnum = PrefixSecurityEnum; +}).call(this); /***/ }), @@ -45708,278 +45777,43 @@ module.exports = require("fs"); /* 748 */, /* 749 */, /* 750 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; -/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + XMLWriterBase = __webpack_require__(423); + module.exports = XMLStringWriter = (function(superClass) { + extend(XMLStringWriter, superClass); -var Punycode = __webpack_require__(815); - - -var internals = {}; - - -// -// Read rules from file. -// -internals.rules = __webpack_require__(50).map(function (rule) { - - return { - rule: rule, - suffix: rule.replace(/^(\*\.|\!)/, ''), - punySuffix: -1, - wildcard: rule.charAt(0) === '*', - exception: rule.charAt(0) === '!' - }; -}); - - -// -// Check is given string ends with `suffix`. -// -internals.endsWith = function (str, suffix) { - - return str.indexOf(suffix, str.length - suffix.length) !== -1; -}; - - -// -// Find rule for a given domain. -// -internals.findRule = function (domain) { - - var punyDomain = Punycode.toASCII(domain); - return internals.rules.reduce(function (memo, rule) { - - if (rule.punySuffix === -1){ - rule.punySuffix = Punycode.toASCII(rule.suffix); + function XMLStringWriter(options) { + XMLStringWriter.__super__.constructor.call(this, options); } - if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { - return memo; - } - // This has been commented out as it never seems to run. This is because - // sub tlds always appear after their parents and we never find a shorter - // match. - //if (memo) { - // var memoSuffix = Punycode.toASCII(memo.suffix); - // if (memoSuffix.length >= punySuffix.length) { - // return memo; - // } - //} - return rule; - }, null); -}; - -// -// Error codes and messages. -// -exports.errorCodes = { - DOMAIN_TOO_SHORT: 'Domain name too short.', - DOMAIN_TOO_LONG: 'Domain name too long. 
It should be no more than 255 chars.', - LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', - LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', - LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', - LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', - LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' -}; - - -// -// Validate domain name and throw if not valid. -// -// From wikipedia: -// -// Hostnames are composed of series of labels concatenated with dots, as are all -// domain names. Each label must be between 1 and 63 characters long, and the -// entire hostname (including the delimiting dots) has a maximum of 255 chars. -// -// Allowed chars: -// -// * `a-z` -// * `0-9` -// * `-` but not as a starting or ending character -// * `.` as a separator for the textual portions of a domain name -// -// * http://en.wikipedia.org/wiki/Domain_name -// * http://en.wikipedia.org/wiki/Hostname -// -internals.validate = function (input) { - - // Before we can validate we need to take care of IDNs with unicode chars. - var ascii = Punycode.toASCII(input); - - if (ascii.length < 1) { - return 'DOMAIN_TOO_SHORT'; - } - if (ascii.length > 255) { - return 'DOMAIN_TOO_LONG'; - } - - // Check each part's length and allowed chars. - var labels = ascii.split('.'); - var label; - - for (var i = 0; i < labels.length; ++i) { - label = labels[i]; - if (!label.length) { - return 'LABEL_TOO_SHORT'; - } - if (label.length > 63) { - return 'LABEL_TOO_LONG'; - } - if (label.charAt(0) === '-') { - return 'LABEL_STARTS_WITH_DASH'; - } - if (label.charAt(label.length - 1) === '-') { - return 'LABEL_ENDS_WITH_DASH'; - } - if (!/^[a-z0-9\-]+$/.test(label)) { - return 'LABEL_INVALID_CHARS'; - } - } -}; - - -// -// Public API -// - - -// -// Parse domain. -// -exports.parse = function (input) { - - if (typeof input !== 'string') { - throw new TypeError('Domain name must be a string.'); - } - - // Force domain to lowercase. - var domain = input.slice(0).toLowerCase(); - - // Handle FQDN. - // TODO: Simply remove trailing dot? - if (domain.charAt(domain.length - 1) === '.') { - domain = domain.slice(0, domain.length - 1); - } - - // Validate and sanitise input. - var error = internals.validate(domain); - if (error) { - return { - input: input, - error: { - message: exports.errorCodes[error], - code: error + XMLStringWriter.prototype.document = function(doc, options) { + var child, i, len, r, ref; + options = this.filterOptions(options); + r = ''; + ref = doc.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, 0); } + if (options.pretty && r.slice(-options.newline.length) === options.newline) { + r = r.slice(0, -options.newline.length); + } + return r; }; - } - var parsed = { - input: input, - tld: null, - sld: null, - domain: null, - subdomain: null, - listed: false - }; + return XMLStringWriter; - var domainParts = domain.split('.'); + })(XMLWriterBase); - // Non-Internet TLD - if (domainParts[domainParts.length - 1] === 'local') { - return parsed; - } - - var handlePunycode = function () { - - if (!/xn--/.test(domain)) { - return parsed; - } - if (parsed.domain) { - parsed.domain = Punycode.toASCII(parsed.domain); - } - if (parsed.subdomain) { - parsed.subdomain = Punycode.toASCII(parsed.subdomain); - } - return parsed; - }; - - var rule = internals.findRule(domain); - - // Unlisted tld. 
- if (!rule) { - if (domainParts.length < 2) { - return parsed; - } - parsed.tld = domainParts.pop(); - parsed.sld = domainParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - if (domainParts.length) { - parsed.subdomain = domainParts.pop(); - } - return handlePunycode(); - } - - // At this point we know the public suffix is listed. - parsed.listed = true; - - var tldParts = rule.suffix.split('.'); - var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); - - if (rule.exception) { - privateParts.push(tldParts.shift()); - } - - parsed.tld = tldParts.join('.'); - - if (!privateParts.length) { - return handlePunycode(); - } - - if (rule.wildcard) { - tldParts.unshift(privateParts.pop()); - parsed.tld = tldParts.join('.'); - } - - if (!privateParts.length) { - return handlePunycode(); - } - - parsed.sld = privateParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - - if (privateParts.length) { - parsed.subdomain = privateParts.join('.'); - } - - return handlePunycode(); -}; - - -// -// Get domain. -// -exports.get = function (domain) { - - if (!domain) { - return null; - } - return exports.parse(domain).domain || null; -}; - - -// -// Check whether domain belongs to a known public suffix. -// -exports.isValid = function (domain) { - - var parsed = exports.parse(domain); - return Boolean(parsed.domain && parsed.listed); -}; +}).call(this); /***/ }), @@ -46107,7 +45941,7 @@ module.exports = function(dst, src) { XMLDocument = __webpack_require__(559); - XMLElement = __webpack_require__(845); + XMLElement = __webpack_require__(701); XMLCData = __webpack_require__(657); @@ -46135,7 +45969,7 @@ module.exports = function(dst, src) { XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); WriterState = __webpack_require__(541); @@ -47959,7 +47793,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry return { statusCode: error.statusCode, result: null, - headers: {} + headers: {}, + error }; } else { @@ -48004,7 +47839,7 @@ exports.retryHttpClientResponse = retryHttpClientResponse; Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.3'; +exports.VERSION = '1.0.4'; //# sourceMappingURL=version.js.map /***/ }), @@ -48028,307 +47863,27 @@ module.exports = require("url"); /* 843 */, /* 844 */, /* 845 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ (function(__unusedmodule, exports) { -// Generated by CoffeeScript 1.12.7 -(function() { - var NodeType, XMLAttribute, XMLElement, XMLNamedNodeMap, XMLNode, getValue, isFunction, isObject, ref, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; - - ref = __webpack_require__(582), isObject = ref.isObject, isFunction = ref.isFunction, getValue = ref.getValue; - - XMLNode = __webpack_require__(257); - - NodeType = __webpack_require__(683); - - XMLAttribute = __webpack_require__(884); - - XMLNamedNodeMap = __webpack_require__(451); - - module.exports = XMLElement = (function(superClass) { - extend(XMLElement, superClass); - - function XMLElement(parent, name, attributes) { - var child, j, len, 
ref1; - XMLElement.__super__.constructor.call(this, parent); - if (name == null) { - throw new Error("Missing element name. " + this.debugInfo()); - } - this.name = this.stringify.name(name); - this.type = NodeType.Element; - this.attribs = {}; - this.schemaTypeInfo = null; - if (attributes != null) { - this.attribute(attributes); - } - if (parent.type === NodeType.Document) { - this.isRoot = true; - this.documentObject = parent; - parent.rootObject = this; - if (parent.children) { - ref1 = parent.children; - for (j = 0, len = ref1.length; j < len; j++) { - child = ref1[j]; - if (child.type === NodeType.DocType) { - child.name = this.name; - break; - } - } - } - } - } - - Object.defineProperty(XMLElement.prototype, 'tagName', { - get: function() { - return this.name; - } - }); - - Object.defineProperty(XMLElement.prototype, 'namespaceURI', { - get: function() { - return ''; - } - }); - - Object.defineProperty(XMLElement.prototype, 'prefix', { - get: function() { - return ''; - } - }); - - Object.defineProperty(XMLElement.prototype, 'localName', { - get: function() { - return this.name; - } - }); - - Object.defineProperty(XMLElement.prototype, 'id', { - get: function() { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - } - }); - - Object.defineProperty(XMLElement.prototype, 'className', { - get: function() { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - } - }); - - Object.defineProperty(XMLElement.prototype, 'classList', { - get: function() { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - } - }); - - Object.defineProperty(XMLElement.prototype, 'attributes', { - get: function() { - if (!this.attributeMap || !this.attributeMap.nodes) { - this.attributeMap = new XMLNamedNodeMap(this.attribs); - } - return this.attributeMap; - } - }); - - XMLElement.prototype.clone = function() { - var att, attName, clonedSelf, ref1; - clonedSelf = Object.create(this); - if (clonedSelf.isRoot) { - clonedSelf.documentObject = null; - } - clonedSelf.attribs = {}; - ref1 = this.attribs; - for (attName in ref1) { - if (!hasProp.call(ref1, attName)) continue; - att = ref1[attName]; - clonedSelf.attribs[attName] = att.clone(); - } - clonedSelf.children = []; - this.children.forEach(function(child) { - var clonedChild; - clonedChild = child.clone(); - clonedChild.parent = clonedSelf; - return clonedSelf.children.push(clonedChild); - }); - return clonedSelf; - }; - - XMLElement.prototype.attribute = function(name, value) { - var attName, attValue; - if (name != null) { - name = getValue(name); - } - if (isObject(name)) { - for (attName in name) { - if (!hasProp.call(name, attName)) continue; - attValue = name[attName]; - this.attribute(attName, attValue); - } - } else { - if (isFunction(value)) { - value = value.apply(); - } - if (this.options.keepNullAttributes && (value == null)) { - this.attribs[name] = new XMLAttribute(this, name, ""); - } else if (value != null) { - this.attribs[name] = new XMLAttribute(this, name, value); - } - } - return this; - }; - - XMLElement.prototype.removeAttribute = function(name) { - var attName, j, len; - if (name == null) { - throw new Error("Missing attribute name. 
" + this.debugInfo()); - } - name = getValue(name); - if (Array.isArray(name)) { - for (j = 0, len = name.length; j < len; j++) { - attName = name[j]; - delete this.attribs[attName]; - } - } else { - delete this.attribs[name]; - } - return this; - }; - - XMLElement.prototype.toString = function(options) { - return this.options.writer.element(this, this.options.writer.filterOptions(options)); - }; - - XMLElement.prototype.att = function(name, value) { - return this.attribute(name, value); - }; - - XMLElement.prototype.a = function(name, value) { - return this.attribute(name, value); - }; - - XMLElement.prototype.getAttribute = function(name) { - if (this.attribs.hasOwnProperty(name)) { - return this.attribs[name].value; - } else { - return null; - } - }; - - XMLElement.prototype.setAttribute = function(name, value) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getAttributeNode = function(name) { - if (this.attribs.hasOwnProperty(name)) { - return this.attribs[name]; - } else { - return null; - } - }; - - XMLElement.prototype.setAttributeNode = function(newAttr) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.removeAttributeNode = function(oldAttr) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getElementsByTagName = function(name) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getAttributeNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.setAttributeNS = function(namespaceURI, qualifiedName, value) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.removeAttributeNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getAttributeNodeNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.setAttributeNodeNS = function(newAttr) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.hasAttribute = function(name) { - return this.attribs.hasOwnProperty(name); - }; - - XMLElement.prototype.hasAttributeNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.setIdAttribute = function(name, isId) { - if (this.attribs.hasOwnProperty(name)) { - return this.attribs[name].isId; - } else { - return isId; - } - }; - - XMLElement.prototype.setIdAttributeNS = function(namespaceURI, localName, isId) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.setIdAttributeNode = function(idAttr, isId) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getElementsByTagName = function(tagname) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.getElementsByTagNameNS = function(namespaceURI, localName) { - throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); - }; - - XMLElement.prototype.getElementsByClassName = function(classNames) { - throw new Error("This DOM method is not implemented." + this.debugInfo()); - }; - - XMLElement.prototype.isEqualNode = function(node) { - var i, j, ref1; - if (!XMLElement.__super__.isEqualNode.apply(this, arguments).isEqualNode(node)) { - return false; - } - if (node.namespaceURI !== this.namespaceURI) { - return false; - } - if (node.prefix !== this.prefix) { - return false; - } - if (node.localName !== this.localName) { - return false; - } - if (node.attribs.length !== this.attribs.length) { - return false; - } - for (i = j = 0, ref1 = this.attribs.length - 1; 0 <= ref1 ? j <= ref1 : j >= ref1; i = 0 <= ref1 ? ++j : --j) { - if (!this.attribs[i].isEqualNode(node.attribs[i])) { - return false; - } - } - return true; - }; - - return XMLElement; - - })(XMLNode); - -}).call(this); +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map /***/ }), /* 846 */, @@ -49815,7 +49370,6 @@ class Poller { }); } /** - * @internal * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ @@ -49829,7 +49383,6 @@ class Poller { } } /** - * @internal * pollOnce does one polling, by calling to the update method of the underlying * poll operation to make any relevant change effective. * @@ -49842,7 +49395,7 @@ class Poller { if (!this.isDone()) { this.operation = await this.operation.update({ abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) + fireProgress: this.fireProgress.bind(this), }); if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. @@ -49863,7 +49416,6 @@ class Poller { } } /** - * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. * * It loops over all of the callbacks received from onProgress, and executes them, sending them @@ -49877,7 +49429,6 @@ class Poller { } } /** - * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ @@ -50041,13 +49592,6 @@ class Poller { } } -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -50058,7 +49602,7 @@ const logger = logger$1.createClientLogger("core-lro"); */ function getPollingUrl(rawResponse, defaultPath) { var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? 
_a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); } function getLocation(rawResponse) { return rawResponse.headers["location"]; @@ -50069,26 +49613,36 @@ function getOperationLocation(rawResponse) { function getAzureAsyncOperation(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? getLocation(rawResponse) - : undefined - }; +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } } - else if (getLocation(rawResponse) !== undefined || +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || getOperationLocation(rawResponse) !== undefined) { return { - mode: "Location" + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", }; } else if (["PUT", "PATCH"].includes(requestMethod)) { return { - mode: "Body" + mode: "Body", }; } return {}; @@ -50121,50 +49675,6 @@ function isUnexpectedPollingResponse(rawResponse) { const successStates = ["succeeded"]; const failureStates = ["failed", "canceled", "cancelled"]; -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return typeof status === "string" ? status.toLowerCase() : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); - }; -} - // Copyright (c) Microsoft Corporation. 
function getProvisioningState(rawResponse) { var _a, _b; @@ -50188,11 +49698,54 @@ function processBodyPollingOperationResult(response) { } // Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); } -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; } // Copyright (c) Microsoft Corporation. @@ -50207,11 +49760,8 @@ function processPassthroughOperationResult(response) { */ function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } case "Location": { - return processLocationPollingOperationResult; + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); } case "Body": { return processBodyPollingOperationResult; @@ -50229,10 +49779,11 @@ function createPoll(lroPrimitives) { const response = await lroPrimitives.sendPollRequest(path); const retryAfter = response.rawResponse.headers["retry-after"]; if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; + : retryAfterInSeconds * 1000; } return getLroStatusFromResponse(response); }; @@ -50355,7 +49906,7 @@ class GenericPollOperation { */ toString() { return JSON.stringify({ - state: this.state + state: this.state, }); } } @@ -50402,84 +49953,1681 @@ exports.PollerStoppedError = PollerStoppedError; /* 890 */, /* 891 */, /* 892 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ (function(__unusedmodule, exports, __webpack_require__) { -var iterate = __webpack_require__(157) - , initState = __webpack_require__(903) - , terminator = __webpack_require__(939) - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - -/** - * Runs iterator over provided sorted array elements in series +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
*/ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; +const punycode = __webpack_require__(815); +const urlParse = __webpack_require__(835).parse; +const util = __webpack_require__(669); +const pubsuffix = __webpack_require__(519); +const Store = __webpack_require__(627).Store; +const MemoryCookieStore = __webpack_require__(349).MemoryCookieStore; +const pathMatch = __webpack_require__(54).pathMatch; +const VERSION = __webpack_require__(459); +const { fromCallback } = __webpack_require__(147); + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; + +const CONTROL_CHARS = /[\x00-\x1F]/; + +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +const TERMINATORS = ["\n", "\r", "\0"]; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +// date-time parsing constants (RFC6265 S5.1.1) + +const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +const MONTH_TO_NUM = { + jan: 0, + feb: 1, + mar: 2, + apr: 3, + may: 4, + jun: 5, + jul: 6, + aug: 7, + sep: 8, + oct: 9, + nov: 10, + dec: 11 +}; + +const MAX_TIME = 2147483647000; // 31-bit max +const MIN_TIME = 0; // 31-bit min +const SAME_SITE_CONTEXT_VAL_ERR = + 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"'; + +function checkSameSiteContext(value) { + const context = String(value).toLowerCase(); + if (context === "none" || context === "lax" || context === "strict") { + return context; + } else { + return null; + } +} + +const PrefixSecurityEnum = Object.freeze({ + SILENT: "silent", + STRICT: "strict", + DISABLED: "unsafe-disabled" +}); + +// Dumped from ip-regex@4.0.0, with the following changes: +// * all capturing groups converted to non-capturing -- "(?:)" +// * support for IPv6 Scoped Literal ("%eth1") removed +// * lowercase hexadecimal only +var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/; + +/* + * Parses a Natural number (i.e., 
non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. + */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + let count = 0; + while (count < token.length) { + const c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2f || c >= 0x3a) { + break; } + count++; + } - state.index++; + // constrain to a minimum and maximum number of digits. + if (count < minDigits || count > maxDigits) { + return null; + } - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; + if (!trailingOK && count != token.length) { + return null; + } + + return parseInt(token.substr(0, count), 10); +} + +function parseTime(token) { + const parts = token.split(":"); + const result = [0, 0, 0]; + + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ + + if (parts.length !== 3) { + return null; + } + + for (let i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + const trailingOK = i == 2; + const num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; } + result[i] = num; + } - // done here - callback(null, state.results); - }); + return result; +} - return terminator.bind(state, callback); +function parseMonth(token) { + token = String(token) + .substr(0, 3) + .toLowerCase(); + const num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; } /* - * -- Sort methods + * RFC6265 S5.1.1 date parser (see RFC for full grammar) */ +function parseDate(str) { + if (!str) { + return; + } -/** - * sort helper to sort array elements in ascending order + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + const tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + let hour = null; + let minute = null; + let second = null; + let dayOfMonth = null; + let month = null; + let year = null; + + for (let i = 0; i < tokens.length; i++) { + const token = tokens[i].trim(); + if (!token.length) { + continue; + } + + let result; + + /* 2.1. If the found-time flag is not set and the token matches the time + * production, set the found-time flag and set the hour- value, + * minute-value, and second-value to the numbers denoted by the digits in + * the date-token, respectively. Skip the remaining sub-steps and continue + * to the next date-token. + */ + if (second === null) { + result = parseTime(token); + if (result) { + hour = result[0]; + minute = result[1]; + second = result[2]; + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (dayOfMonth === null) { + // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 1, 2, true); + if (result !== null) { + dayOfMonth = result; + continue; + } + } + + /* 2.3. 
If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = parseMonth(token); + if (result !== null) { + month = result; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the + * year production, set the found-year flag and set the year-value to the + * number denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (year === null) { + // "year = 2*4DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 2, 4, true); + if (result !== null) { + year = result; + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000. + */ + if (year >= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } + } + } + + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || + month === null || + year === null || + second === null || + dayOfMonth < 1 || + dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } + + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} + +function formatDate(date) { + return date.toUTCString(); +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * S5.1.3: + * "A string domain-matches a given domain string if at least one of the + * following conditions hold:" + * + * " o The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* " o All of the following [three] conditions hold:" */ + + /* "* The domain string is a suffix of the string" */ + const idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // next, check it's a proper suffix + // e.g., "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { + return false; // it's not a suffix + } + + /* " * The last character of the string that is not included in the + * domain string is a %x2E (".") character." 
*/ + if (str.substr(idx-1,1) !== '.') { + return false; // doesn't align on "." + } + + /* " * The string is a host name (i.e., not an IP address)." */ + if (IP_REGEX_LOWERCASE.test(str)) { + return false; // it's an IP address + } + + return true; +} + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result + * Assumption: the path (and not query part or absolute uri) is passed in. */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; +function defaultPath(path) { + // "2. If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0, 1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + const rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} + +function trimTerminator(str) { + for (let t = 0; t < TERMINATORS.length; t++) { + const terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0, terminatorIdx); + } + } + + return str; +} + +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); + + let firstEq = cookiePair.indexOf("="); + if (looseMode) { + if (firstEq === 0) { + // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf("="); // might still need to split on '=' + } + } else { + // non-loose mode + if (firstEq <= 0) { + // no '=' or is at start + return; // needs to have non-empty "cookie-name" + } + } + + let cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq + 1).trim(); + } + + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } + + const c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; +} + +function parse(str, options) { + if (!options || typeof options !== "object") { + options = {}; + } + str = str.trim(); + + // We use a regex to parse the "name-value-pair" part of S5.2 + const firstSemi = str.indexOf(";"); // S5.2 step 1 + const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi); + const c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + const unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. 
Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. + */ + const cookie_avs = unparsed.split(";"); + while (cookie_avs.length) { + const av = cookie_avs.shift().trim(); + if (av.length === 0) { + // happens if ";;" appears + continue; + } + const av_sep = av.indexOf("="); + let av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0, av_sep); + av_value = av.substr(av_sep + 1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch (av_key) { + case "expires": // S5.2.1 + if (av_value) { + const exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case "max-age": // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + const delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case "domain": // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + const domain = av_value.trim().replace(/^\./, ""); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; + + case "path": // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case "secure": // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case "httponly": // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + case "samesite": // RFC6265bis-02 S5.3.7 + const enforcement = av_value ? av_value.toLowerCase() : ""; + switch (enforcement) { + case "strict": + c.sameSite = "strict"; + break; + case "lax": + c.sameSite = "lax"; + break; + default: + // RFC6265bis-02 S5.3.7 step 1: + // "If cookie-av's attribute-value is not a case-insensitive match + // for "Strict" or "Lax", ignore the "cookie-av"." + // This effectively sets it to 'none' from the prototype. 
+ break; + } + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; } /** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result + * If the cookie-name begins with a case-sensitive match for the + * string "__Secure-", abort these steps and ignore the cookie + * entirely unless the cookie's secure-only-flag is true. + * @param cookie + * @returns boolean */ -function descending(a, b) -{ - return -1 * ascending(a, b); +function isSecurePrefixConditionMet(cookie) { + return !cookie.key.startsWith("__Secure-") || cookie.secure; } +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Host-", abort these steps and ignore the cookie + * entirely unless the cookie meets all the following criteria: + * 1. The cookie's secure-only-flag is true. + * 2. The cookie's host-only-flag is true. + * 3. The cookie-attribute-list contains an attribute with an + * attribute-name of "Path", and the cookie's path is "/". + * @param cookie + * @returns boolean + */ +function isHostPrefixConditionMet(cookie) { + return ( + !cookie.key.startsWith("__Host-") || + (cookie.secure && + cookie.hostOnly && + cookie.path != null && + cookie.path === "/") + ); +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + let obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + let obj; + if (typeof str === "string") { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + const c = new Cookie(); + for (let i = 0; i < Cookie.serializableProperties.length; i++) { + const prop = Cookie.serializableProperties[i]; + if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if (prop === "expires" || prop === "creation" || prop === "lastAccessed") { + if (obj[prop] === null) { + c[prop] = null; + } else { + c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]); + } + } else { + c[prop] = obj[prop]; + } + } + + return c; +} + +/* Section 5.4 part 2: + * "* Cookies with longer paths are listed before cookies with + * shorter paths. + * + * * Among cookies that have equal-length path fields, cookies with + * earlier creation-times are listed before cookies with later + * creation-times." + */ + +function cookieCompare(a, b) { + let cmp = 0; + + // descending for length: b CMP a + const aPathLen = a.path ? a.path.length : 0; + const bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; + } + + // ascending for time: a CMP b + const aTime = a.creation ? a.creation.getTime() : MAX_TIME; + const bTime = b.creation ? b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; + } + + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; + + return cmp; +} + +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. 
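// An illustrative usage sketch of the parse() logic above, assuming the bundled
// module is the published "tough-cookie" package and can be required directly;
// the cookie string and values below are hypothetical.
const tough = require("tough-cookie");
const c = tough.parse(
  "sid=abc123; Domain=.example.com; Path=/app; Max-Age=3600; Secure; SameSite=Lax"
);
console.log(c.key);      // "sid"
console.log(c.value);    // "abc123"
console.log(c.domain);   // "example.com" -- leading "." stripped, lower-cased (S5.2.3)
console.log(c.path);     // "/app"
console.log(c.maxAge);   // 3600 -- stored via setMaxAge() (S5.2.2)
console.log(c.sameSite); // "lax"
console.log(c.secure);   // true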
+function permutePath(path) { + if (path === "/") { + return ["/"]; + } + const permutations = [path]; + while (path.length > 1) { + const lindex = path.lastIndexOf("/"); + if (lindex === 0) { + break; + } + path = path.substr(0, lindex); + permutations.push(path); + } + permutations.push("/"); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. + try { + url = decodeURI(url); + } catch (err) { + // Silently swallow error + } + + return urlParse(url); +} + +const cookieDefaults = { + // the order in which the RFC has them: + key: "", + value: "", + expires: "Infinity", + maxAge: null, + domain: null, + path: null, + secure: false, + httpOnly: false, + extensions: null, + // set by the CookieJar: + hostOnly: null, + pathIsDefault: null, + creation: null, + lastAccessed: null, + sameSite: "none" +}; + +class Cookie { + constructor(options = {}) { + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + + Object.assign(this, cookieDefaults, options); + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, "creationIndex", { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); + } + + inspect() { + const now = Date.now(); + const hostOnly = this.hostOnly != null ? this.hostOnly : "?"; + const createAge = this.creation + ? `${now - this.creation.getTime()}ms` + : "?"; + const accessAge = this.lastAccessed + ? `${now - this.lastAccessed.getTime()}ms` + : "?"; + return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`; + } + + toJSON() { + const obj = {}; + + for (const prop of Cookie.serializableProperties) { + if (this[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if ( + prop === "expires" || + prop === "creation" || + prop === "lastAccessed" + ) { + if (this[prop] === null) { + obj[prop] = null; + } else { + obj[prop] = + this[prop] == "Infinity" // intentionally not === + ? "Infinity" + : this[prop].toISOString(); + } + } else if (prop === "maxAge") { + if (this[prop] !== null) { + // again, intentionally not === + obj[prop] = + this[prop] == Infinity || this[prop] == -Infinity + ? this[prop].toString() + : this[prop]; + } + } else { + if (this[prop] !== cookieDefaults[prop]) { + obj[prop] = this[prop]; + } + } + } + + return obj; + } + + clone() { + return fromJSON(this.toJSON()); + } + + validate() { + if (!COOKIE_OCTETS.test(this.value)) { + return false; + } + if ( + this.expires != Infinity && + !(this.expires instanceof Date) && + !parseDate(this.expires) + ) { + return false; + } + if (this.maxAge != null && this.maxAge <= 0) { + return false; // "Max-Age=" non-zero-digit *DIGIT + } + if (this.path != null && !PATH_VALUE.test(this.path)) { + return false; + } + + const cdomain = this.cdomain(); + if (cdomain) { + if (cdomain.match(/\.$/)) { + return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this + } + const suffix = pubsuffix.getPublicSuffix(cdomain); + if (suffix == null) { + // it's a public suffix + return false; + } + } + return true; + } + + setExpires(exp) { + if (exp instanceof Date) { + this.expires = exp; + } else { + this.expires = parseDate(exp) || "Infinity"; + } + } + + setMaxAge(age) { + if (age === Infinity || age === -Infinity) { + this.maxAge = age.toString(); // so JSON.stringify() works + } else { + this.maxAge = age; + } + } + + cookieString() { + let val = this.value; + if (val == null) { + val = ""; + } + if (this.key === "") { + return val; + } + return `${this.key}=${val}`; + } + + // gives Set-Cookie header format + toString() { + let str = this.cookieString(); + + if (this.expires != Infinity) { + if (this.expires instanceof Date) { + str += `; Expires=${formatDate(this.expires)}`; + } else { + str += `; Expires=${this.expires}`; + } + } + + if (this.maxAge != null && this.maxAge != Infinity) { + str += `; Max-Age=${this.maxAge}`; + } + + if (this.domain && !this.hostOnly) { + str += `; Domain=${this.domain}`; + } + if (this.path) { + str += `; Path=${this.path}`; + } + + if (this.secure) { + str += "; Secure"; + } + if (this.httpOnly) { + str += "; HttpOnly"; + } + if (this.sameSite && this.sameSite !== "none") { + const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()]; + str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`; + } + if (this.extensions) { + this.extensions.forEach(ext => { + str += `; ${ext}`; + }); + } + + return str; + } + + // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + // S5.3 says to give the "latest representable date" for which we use Infinity + // For "expired" we use 0 + TTL(now) { + /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires + * attribute, the Max-Age attribute has precedence and controls the + * expiration date of the cookie. + * (Concurs with S5.3 step 3) + */ + if (this.maxAge != null) { + return this.maxAge <= 0 ? 0 : this.maxAge * 1000; + } + + let expires = this.expires; + if (expires != Infinity) { + if (!(expires instanceof Date)) { + expires = parseDate(expires) || Infinity; + } + + if (expires == Infinity) { + return Infinity; + } + + return expires.getTime() - (now || Date.now()); + } + + return Infinity; + } + + // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + expiryTime(now) { + if (this.maxAge != null) { + const relativeTo = now || this.creation || new Date(); + const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000; + return relativeTo.getTime() + age; + } + + if (this.expires == Infinity) { + return Infinity; + } + return this.expires.getTime(); + } + + // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere), except it returns a Date + expiryDate(now) { + const millisec = this.expiryTime(now); + if (millisec == Infinity) { + return new Date(MAX_TIME); + } else if (millisec == -Infinity) { + return new Date(MIN_TIME); + } else { + return new Date(millisec); + } + } + + // This replaces the "persistent-flag" parts of S5.3 step 3 + isPersistent() { + return this.maxAge != null || this.expires != Infinity; + } + + // Mostly S5.1.2 and S5.2.3: + canonicalizedDomain() { + if (this.domain == null) { + return null; + } + return canonicalDomain(this.domain); + } + + cdomain() { + return this.canonicalizedDomain(); + } +} + +Cookie.cookiesCreated = 0; +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; +Cookie.serializableProperties = Object.keys(cookieDefaults); +Cookie.sameSiteLevel = { + strict: 3, + lax: 2, + none: 1 +}; + +Cookie.sameSiteCanonical = { + strict: "Strict", + lax: "Lax" +}; + +function getNormalizedPrefixSecurity(prefixSecurity) { + if (prefixSecurity != null) { + const normalizedPrefixSecurity = prefixSecurity.toLowerCase(); + /* The three supported options */ + switch (normalizedPrefixSecurity) { + case PrefixSecurityEnum.STRICT: + case PrefixSecurityEnum.SILENT: + case PrefixSecurityEnum.DISABLED: + return normalizedPrefixSecurity; + } + } + /* Default is SILENT */ + return PrefixSecurityEnum.SILENT; +} + +class CookieJar { + constructor(store, options = { rejectPublicSuffixes: true }) { + if (typeof options === "boolean") { + options = { rejectPublicSuffixes: options }; + } + this.rejectPublicSuffixes = options.rejectPublicSuffixes; + this.enableLooseMode = !!options.looseMode; + this.allowSpecialUseDomain = !!options.allowSpecialUseDomain; + this.store = store || new MemoryCookieStore(); + this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity); + this._cloneSync = syncWrap("clone"); + this._importCookiesSync = syncWrap("_importCookies"); + this.getCookiesSync = syncWrap("getCookies"); + this.getCookieStringSync = syncWrap("getCookieString"); + this.getSetCookieStringsSync = syncWrap("getSetCookieStrings"); + this.removeAllCookiesSync = syncWrap("removeAllCookies"); + this.setCookieSync = syncWrap("setCookie"); + this.serializeSync = syncWrap("serialize"); + } + + setCookie(cookie, url, options, cb) { + let err; + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const loose = options.loose || this.enableLooseMode; + + let sameSiteContext = null; + if (options.sameSiteContext) { + sameSiteContext = checkSameSiteContext(options.sameSiteContext); + if (!sameSiteContext) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + // S5.3 step 1 + if (typeof cookie === "string" || cookie instanceof String) { + cookie = Cookie.parse(cookie, { loose: loose }); + if (!cookie) { + err = new Error("Cookie failed to parse"); + return cb(options.ignoreError ? null : err); + } + } else if (!(cookie instanceof Cookie)) { + // If you're seeing this error, and are passing in a Cookie object, + // it *might* be a Cookie object from another loaded version of tough-cookie. + err = new Error( + "First argument to setCookie must be a Cookie object or string" + ); + return cb(options.ignoreError ? 
null : err); + } + + // S5.3 step 2 + const now = options.now || new Date(); // will assign later to save effort in the face of errors + + // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() + + // S5.3 step 4: NOOP; domain is null by default + + // S5.3 step 5: public suffixes + if (this.rejectPublicSuffixes && cookie.domain) { + const suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); + if (suffix == null) { + // e.g. "com" + err = new Error("Cookie has domain set to a public suffix"); + return cb(options.ignoreError ? null : err); + } + } + + // S5.3 step 6: + if (cookie.domain) { + if (!domainMatch(host, cookie.cdomain(), false)) { + err = new Error( + `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}` + ); + return cb(options.ignoreError ? null : err); + } + + if (cookie.hostOnly == null) { + // don't reset if already set + cookie.hostOnly = false; + } + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== "/") { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } + + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute + + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + + // 6252bis-02 S5.4 Step 13 & 14: + if (cookie.sameSite !== "none" && sameSiteContext) { + // "If the cookie's "same-site-flag" is not "None", and the cookie + // is being set from a context whose "site for cookies" is not an + // exact match for request-uri's host's registered domain, then + // abort these steps and ignore the newly created cookie entirely." + if (sameSiteContext === "none") { + err = new Error( + "Cookie is SameSite but this is a cross-origin request" + ); + return cb(options.ignoreError ? null : err); + } + } + + /* 6265bis-02 S5.4 Steps 15 & 16 */ + const ignoreErrorForPrefixSecurity = + this.prefixSecurity === PrefixSecurityEnum.SILENT; + const prefixSecurityDisabled = + this.prefixSecurity === PrefixSecurityEnum.DISABLED; + /* If prefix checking is not disabled ...*/ + if (!prefixSecurityDisabled) { + let errorFound = false; + let errorMsg; + /* Check secure prefix condition */ + if (!isSecurePrefixConditionMet(cookie)) { + errorFound = true; + errorMsg = "Cookie has __Secure prefix but Secure attribute is not set"; + } else if (!isHostPrefixConditionMet(cookie)) { + /* Check host prefix condition */ + errorFound = true; + errorMsg = + "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'"; + } + if (errorFound) { + return cb( + options.ignoreError || ignoreErrorForPrefixSecurity + ? 
null + : new Error(errorMsg) + ); + } + } + + const store = this.store; + + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } + + function withCookie(err, oldCookie) { + if (err) { + return cb(err); + } + + const next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; + + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { + // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 + } + } + + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); + } + + // RFC6365 S5.4 + getCookies(url, options, cb) { + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const path = context.pathname || "/"; + + let secure = options.secure; + if ( + secure == null && + context.protocol && + (context.protocol == "https:" || context.protocol == "wss:") + ) { + secure = true; + } + + let sameSiteLevel = 0; + if (options.sameSiteContext) { + const sameSiteContext = checkSameSiteContext(options.sameSiteContext); + sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext]; + if (!sameSiteLevel) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + let http = options.http; + if (http == null) { + http = true; + } + + const now = options.now || Date.now(); + const expireCheck = options.expire !== false; + const allPaths = !!options.allPaths; + const store = this.store; + + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } + } + + // "The request-uri's path path-matches the cookie's path." 
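// A usage sketch of the CookieJar flow implemented above: setCookie() applies the
// RFC 6265 S5.3 storage checks and getCookieString() the S5.4 matching and sorting.
// The promise-style calls rely on the fromCallback() wrapping applied further below;
// the URLs and cookie value are hypothetical.
const { CookieJar } = require("tough-cookie");
async function demo() {
  const jar = new CookieJar(); // backed by MemoryCookieStore by default
  await jar.setCookie("sid=abc123; Path=/; Secure", "https://example.com/");
  const header = await jar.getCookieString("https://example.com/app");
  console.log(header); // "sid=abc123"
}
demo();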
+ if (!allPaths && !pathMatch(path, c.path)) { + return false; + } + + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; + } + + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; + } + + // RFC6265bis-02 S5.3.7 + if (sameSiteLevel) { + const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"]; + if (cookieLevel > sameSiteLevel) { + // only allow cookies at or below the request level + return false; + } + } + + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored + return false; + } + + return true; + } + + store.findCookies( + host, + allPaths ? null : path, + this.allowSpecialUseDomain, + (err, cookies) => { + if (err) { + return cb(err); + } + + cookies = cookies.filter(matchingCookie); + + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } + + // S5.4 part 3 + const now = new Date(); + for (const cookie of cookies) { + cookie.lastAccessed = now; + } + // TODO persist lastAccessed + + cb(null, cookies); + } + ); + } + + getCookieString(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies + .sort(cookieCompare) + .map(c => c.cookieString()) + .join("; ") + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + getSetCookieStrings(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies.map(c => { + return c.toString(); + }) + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + serialize(cb) { + let type = this.store.constructor.name; + if (type === "Object") { + type = null; + } + + // update README.md "Serialization Format" if you change this, please! + const serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. + version: `tough-cookie@${VERSION}`, + + // add the store type, to make humans happy: + storeType: type, + + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, + + // this gets filled from getAllCookies: + cookies: [] + }; + + if ( + !( + this.store.getAllCookies && + typeof this.store.getAllCookies === "function" + ) + ) { + return cb( + new Error( + "store does not support getAllCookies and cannot be serialized" + ) + ); + } + + this.store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + serialized.cookies = cookies.map(cookie => { + // convert to serialized 'raw' cookies + cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie; + + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; + + return cookie; + }); + + return cb(null, serialized); + }); + } + + toJSON() { + return this.serializeSync(); + } + + // use the class method CookieJar.deserialize instead of calling this directly + _importCookies(serialized, cb) { + let cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error("serialized jar has no cookies array")); + } + cookies = cookies.slice(); // do not modify the original + + const putNext = err => { + if (err) { + return cb(err); + } + + if (!cookies.length) { + return cb(err, this); + } + + let cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } + + if (cookie === null) { + return putNext(null); // skip this cookie + } + + this.store.putCookie(cookie, putNext); + }; + + putNext(); + } + + clone(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } + + this.serialize((err, serialized) => { + if (err) { + return cb(err); + } + CookieJar.deserialize(serialized, newStore, cb); + }); + } + + cloneSync(newStore) { + if (arguments.length === 0) { + return this._cloneSync(); + } + if (!newStore.synchronous) { + throw new Error( + "CookieJar clone destination store is not synchronous; use async API instead." + ); + } + return this._cloneSync(newStore); + } + + removeAllCookies(cb) { + const store = this.store; + + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. + if ( + typeof store.removeAllCookies === "function" && + store.removeAllCookies !== Store.prototype.removeAllCookies + ) { + return store.removeAllCookies(cb); + } + + store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + if (cookies.length === 0) { + return cb(null); + } + + let completedCount = 0; + const removeErrors = []; + + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } + + completedCount++; + + if (completedCount === cookies.length) { + return cb(removeErrors.length ? removeErrors[0] : null); + } + } + + cookies.forEach(cookie => { + store.removeCookie( + cookie.domain, + cookie.path, + cookie.key, + removeCookieCb + ); + }); + }); + } + + static deserialize(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; + } + + let serialized; + if (typeof strOrObj === "string") { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); + } + } else { + serialized = strOrObj; + } + + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, err => { + if (err) { + return cb(err); + } + cb(null, jar); + }); + } + + static deserializeSync(strOrObj, store) { + const serialized = + typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj; + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." 
+ ); + } + + jar._importCookiesSync(serialized); + return jar; + } +} +CookieJar.fromJSON = CookieJar.deserializeSync; + +[ + "_importCookies", + "clone", + "getCookies", + "getCookieString", + "getSetCookieStrings", + "removeAllCookies", + "serialize", + "setCookie" +].forEach(name => { + CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]); +}); +CookieJar.deserialize = fromCallback(CookieJar.deserialize); + +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function(...args) { + if (!this.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." + ); + } + + let syncErr, syncResult; + this[method](...args, (err, result) => { + syncErr = err; + syncResult = result; + }); + + if (syncErr) { + throw syncErr; + } + return syncResult; + }; +} + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = __webpack_require__(383).permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; +exports.PrefixSecurityEnum = PrefixSecurityEnum; + /***/ }), /* 893 */ @@ -53332,26 +54480,53 @@ exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); Object.defineProperty(exports, '__esModule', { value: true }); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - var uuid = __webpack_require__(585); -var tough = __webpack_require__(701); +var util = __webpack_require__(669); +var tslib = __webpack_require__(865); +var xml2js = __webpack_require__(992); +var abortController = __webpack_require__(106); +var logger$1 = __webpack_require__(928); +var coreAuth = __webpack_require__(229); +var os = __webpack_require__(87); var http = __webpack_require__(605); var https = __webpack_require__(211); -var node_fetch = _interopDefault(__webpack_require__(454)); -var abortController = __webpack_require__(106); -var FormData = _interopDefault(__webpack_require__(790)); -var util = __webpack_require__(669); -var url = __webpack_require__(835); -var stream = __webpack_require__(794); -var logger$1 = __webpack_require__(928); +var tough = __webpack_require__(892); var tunnel = __webpack_require__(413); -var tslib = __webpack_require__(865); -var coreAuth = __webpack_require__(229); -var xml2js = __webpack_require__(992); -var os = __webpack_require__(87); +var stream = __webpack_require__(794); +var FormData = __webpack_require__(790); +var node_fetch = __webpack_require__(454); var coreTracing = __webpack_require__(263); -__webpack_require__(338); +var url = __webpack_require__(835); +__webpack_require__(97); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? 
d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -53400,7 +54575,7 @@ class HttpHeaders { set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, - value: headerValue.toString() + value: headerValue.toString(), }; } /** @@ -53432,12 +54607,7 @@ class HttpHeaders { * Get the headers that are contained this collection as an object. */ rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name.toLowerCase()] = header.value; - } - return result; + return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. @@ -53474,14 +54644,27 @@ class HttpHeaders { /** * Get the JSON object representation of this HTTP header collection. */ - toJson() { - return this.rawHeaders(); + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; } /** * Get the string representation of this HTTP header collection. */ toString() { - return JSON.stringify(this.toJson()); + return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. @@ -53525,11 +54708,14 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ const Constants = { /** * The core-http version */ - coreHttpVersion: "2.2.2", + coreHttpVersion: "2.2.4", /** * Specifies HTTP. */ @@ -53565,12 +54751,12 @@ const Constants = { POST: "POST", MERGE: "MERGE", HEAD: "HEAD", - PATCH: "PATCH" + PATCH: "PATCH", }, StatusCodes: { TooManyRequests: 429, - ServiceUnavailable: 503 - } + ServiceUnavailable: 503, + }, }, /** * Defines constants for use with HTTP headers. @@ -53590,8 +54776,8 @@ const Constants = { /** * The UserAgent header. */ - USER_AGENT: "User-Agent" - } + USER_AGENT: "User-Agent", + }, }; // Copyright (c) Microsoft Corporation. @@ -53806,18 +54992,38 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. 
+ */ class Serializer { - constructor(modelMappers = {}, isXML) { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { this.modelMappers = modelMappers; this.isXML = isXML; } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value != undefined) { const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -53859,20 +55065,20 @@ class Serializer { } } /** - * Serialize the given object based on its metadata defined in the mapper + * Serialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param object - A valid Javascript object to be serialized - * @param objectName - Name of the serialized object - * @param options - additional options to deserialization - * @returns A valid serialized Javascript object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. */ serialize(mapper, object, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; let payload = {}; const mapperType = mapper.type.name; @@ -53942,20 +55148,20 @@ class Serializer { return payload; } /** - * Deserialize the given object based on its metadata defined in the mapper + * Deserialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param responseBody - A valid Javascript entity to be deserialized - * @param objectName - Name of the deserialized object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object + * @returns A valid deserialized Javascript object. 
*/ deserialize(mapper, responseBody, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; if (responseBody == undefined) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { @@ -54054,9 +55260,7 @@ function bufferToBase64Url(buffer) { // Uint8Array to Base64. const str = encodeByteArray(buffer); // Base64 to Base64Url. - return trimEnd(str, "=") - .replace(/\+/g, "-") - .replace(/\//g, "_"); + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) { @@ -54272,10 +55476,10 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, return tempDictionary; } /** - * Resolves the additionalProperties property from a referenced mapper - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; @@ -54286,7 +55490,7 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { return additionalProperties; } /** - * Finds the mapper referenced by className + * Finds the mapper referenced by `className`. * @param serializer - The serializer containing the entire set of mappers * @param mapper - The composite mapper to resolve * @param objectName - Name of the object being serialized @@ -54625,7 +55829,9 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator); } -// TODO: why is this here? +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ function serializeObject(toSerialize) { const castToSerialize = toSerialize; if (toSerialize == undefined) @@ -54663,6 +55869,9 @@ function strEnum(o) { } return result; } +/** + * String enum containing the string types of property mappers. + */ // eslint-disable-next-line @typescript-eslint/no-redeclare const MapperType = strEnum([ "Base64Url", @@ -54680,7 +55889,7 @@ const MapperType = strEnum([ "String", "Stream", "TimeSpan", - "UnixTime" + "UnixTime", ]); // Copyright (c) Microsoft Corporation. @@ -54943,9 +56152,6 @@ class WebResource { } } -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. @@ -55243,6 +56449,10 @@ class URLBuilder { } } } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ toString() { let result = ""; if (this._scheme) { @@ -55278,6 +56488,9 @@ class URLBuilder { this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
+ */ static parse(text) { const result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); @@ -55534,6 +56747,60 @@ function nextQuery(tokenizer) { tokenizer._currentState = "DONE"; } +// Copyright (c) Microsoft Corporation. +function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + // Copyright (c) Microsoft Corporation. const RedactedString = "REDACTED"; const defaultAllowedHeaderNames = [ @@ -55574,7 +56841,7 @@ const defaultAllowedHeaderNames = [ "Retry-After", "Server", "Transfer-Encoding", - "User-Agent" + "User-Agent", ]; const defaultAllowedQueryParameters = ["api-version"]; class Sanitizer { @@ -55667,8 +56934,14 @@ class Sanitizer { } } +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + // Copyright (c) Microsoft Corporation. const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ class RestError extends Error { constructor(message, code, statusCode, request, response) { super(message); @@ -55686,13 +56959,22 @@ class RestError extends Error { return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; } } +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ RestError.PARSE_ERROR = "PARSE_ERROR"; // Copyright (c) Microsoft Corporation. 
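// A sketch of the proxy tunneling choice made by createProxyAgent()/createTunnel()
// above, assuming the bundled "tunnel" package is used directly; the proxy host and
// port are placeholders. An HTTPS request through an HTTP proxy gets an
// httpsOverHttp agent (a CONNECT tunnel), an HTTP request through an HTTP proxy
// gets httpOverHttp, and so on.
const tunnel = require("tunnel");
const proxyAgent = tunnel.httpsOverHttp({
  proxy: { host: "proxy.internal", port: 8080, headers: {} },
});
// The resulting agent is handed to the underlying http(s)/node-fetch request options.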
const logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} class ReportTransform extends stream.Transform { constructor(progressCallback) { super(); @@ -55706,7 +56988,44 @@ class ReportTransform extends stream.Transform { callback(undefined); } } -class FetchHttpClient { +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ async sendRequest(httpRequest) { var _a; if (!httpRequest && typeof httpRequest !== "object") { @@ -55732,7 +57051,7 @@ class FetchHttpClient { } if (httpRequest.formData) { const formData = httpRequest.formData; - const requestForm = new FormData(); + const requestForm = new FormData__default["default"](); const appendFormValue = (key, value) => { // value function probably returns a stream so we can provide a fresh stream on each retry if (typeof value === "function") { @@ -55802,7 +57121,7 @@ class FetchHttpClient { readableStreamBody: streaming ? response.body : undefined, - bodyAsText: !streaming ? await response.text() : undefined + bodyAsText: !streaming ? await response.text() : undefined, }; const onDownloadProgress = httpRequest.onDownloadProgress; if (onDownloadProgress) { @@ -55856,94 +57175,6 @@ class FetchHttpClient { } } } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} - -// Copyright (c) Microsoft Corporation. 
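// A sketch of the Rollup interop helpers introduced in this bundle (the
// _interopDefaultLegacy()/_interopNamespace() functions above), assuming module 790
// resolves to the "form-data" package: CommonJS dependencies are wrapped so they are
// always consumed through a `default` property, which is why sendRequest() above
// builds form data with `new FormData__default["default"]()` and the fetch() override
// below delegates to `node_fetch__default["default"](...)`.
function interopDefaultLegacy(e) {
  return e && typeof e === "object" && "default" in e ? e : { default: e };
}
const FormData = require("form-data");
const FormDataDefault = interopDefaultLegacy(FormData);
const form = new FormDataDefault["default"](); // same constructor either way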
-function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {} - } - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel.httpOverHttps(tunnelOptions); - } - else { - return tunnel.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); - } getOrCreateAgent(httpRequest) { var _a; const isHttps = isUrlHttps(httpRequest.url); @@ -55975,24 +57206,30 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } const agentOptions = { - keepAlive: httpRequest.keepAlive + keepAlive: httpRequest.keepAlive, }; if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); } else { - agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); } return agent; } else { - return isHttps ? https.globalAgent : http.globalAgent; + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; } } + /** + * Uses `node-fetch` to perform the request. + */ // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs async fetch(input, init) { - return node_fetch(input, init); + return node_fetch__default["default"](input, init); } + /** + * Prepares a request based on the provided web resource. 
+ */ async prepareRequest(httpRequest) { const requestInit = {}; if (this.cookieJar && !httpRequest.headers.get("Cookie")) { @@ -56013,6 +57250,9 @@ class NodeFetchHttpClient extends FetchHttpClient { requestInit.compress = httpRequest.decompressResponse; return requestInit; } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ async processRequest(operationResponse) { if (this.cookieJar) { const setCookieHeader = operationResponse.headers.get("Set-Cookie"); @@ -56033,6 +57273,11 @@ class NodeFetchHttpClient extends FetchHttpClient { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; (function (HttpPipelineLogLevel) { /** * A log level that indicates that no logs will be logged. @@ -56052,6 +57297,7 @@ class NodeFetchHttpClient extends FetchHttpClient { HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); +// Copyright (c) Microsoft Corporation. /** * Converts an OperationOptions to a RequestOptionsBase * @@ -56073,8 +57319,22 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ class BaseRequestPolicy { - constructor(_nextPolicy, _options) { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -56126,113 +57386,6 @@ class RequestPolicyOptions { } } -// Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - } - }; -} -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. 
- * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - // Copyright (c) Microsoft Corporation. // Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed // by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 @@ -56265,18 +57418,18 @@ const xml2jsDefaultOptionsV2 = { xmldec: { version: "1.0", encoding: "UTF-8", - standalone: true + standalone: true, }, doctype: undefined, renderOpts: { pretty: true, indent: " ", - newline: "\n" + newline: "\n", }, headless: false, chunkSize: 10000, emptyTag: "", - cdata: false + cdata: false, }; // The xml2js settings for general XML parsing operations. const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); @@ -56285,7 +57438,7 @@ xml2jsParserSettings.explicitArray = false; const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { - pretty: false + pretty: false, }; /** * Converts given JSON object to XML string @@ -56296,7 +57449,7 @@ function stringifyXML(obj, opts = {}) { var _a; xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -56308,7 +57461,7 @@ function parseXML(str, opts = {}) { var _a; xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); return new Promise((resolve, reject) => { if (!str) { reject(new Error("Document is empty")); @@ -56335,7 +57488,7 @@ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { create: (nextPolicy, options) => { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - } + }, }; } const defaultJsonContentTypes = ["application/json", "text/json"]; @@ -56343,8 +57496,8 @@ const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; const DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, - xml: defaultXmlContentTypes - } + xml: defaultXmlContentTypes, + }, }; /** * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the @@ -56362,7 +57515,7 @@ class DeserializationPolicy extends BaseRequestPolicy { } async sendRequest(request) { return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey + xmlCharKey: this.xmlCharKey, })); } } @@ -56395,12 +57548,20 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { if (!shouldDeserializeResponse(parsedResponse)) { @@ -56551,6 +57712,113 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { return Promise.resolve(operationResponse); } +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
+ * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. const DEFAULT_CLIENT_RETRY_COUNT = 3; @@ -56614,7 +57882,7 @@ function isDefined(thing) { } // Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; +const StandardAbortMessage$1 = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. 
@@ -56629,7 +57897,7 @@ function delay(delayInMs, value, options) { let timer = undefined; let onAborted = undefined; const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { @@ -56657,20 +57925,34 @@ function delay(delayInMs, value, options) { } // Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - } + }, }; } +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +exports.RetryMode = void 0; (function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); const DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, }; /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. @@ -56695,11 +57977,11 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); } } -async function retry(policy, request, response, retryData, requestError) { +async function retry$1(policy, request, response, retryData, requestError) { function shouldPolicyRetry(responseParam) { const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { @@ -56716,7 +57998,7 @@ async function retry(policy, request, response, retryData, requestError) { retryData = updateRetryData({ retryInterval: policy.retryInterval, minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval + maxRetryInterval: policy.maxRetryInterval, }, retryData, requestError); const isAborted = request.abortSignal && request.abortSignal.aborted; if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { @@ -56724,10 +58006,10 @@ async function retry(policy, request, response, retryData, requestError) { try { await delay(retryData.retryInterval); const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); + return retry$1(policy, request, res, retryData); } catch (err) { - return retry(policy, request, response, retryData, err); + return retry$1(policy, request, response, retryData, err); } } else if (isAborted || requestError || !response) { @@ -56742,11 +58024,467 @@ async function retry(policy, request, response, retryData, requestError) { } // Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. 
+ */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? 
getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { create: (nextPolicy, options) => { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - } + }, }; } class GenerateClientRequestIdPolicy extends BaseRequestPolicy { @@ -56763,130 +58501,190 @@ class GenerateClientRequestIdPolicy extends BaseRequestPolicy { } // Copyright (c) Microsoft Corporation. 
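// Illustrative sketch, not part of the applied patch: the createTokenCycler code
// added above refreshes a cached AccessToken proactively once it is within
// refreshWindowInMs of expiry (and no refresh is already running), and blocks the
// pipeline once it is within forcedRefreshWindowInMs, using the defaults from
// DEFAULT_CYCLER_OPTIONS. The token literal below is hypothetical and only
// demonstrates the two timing checks.
const cyclerDefaults = { forcedRefreshWindowInMs: 1000, refreshWindowInMs: 2 * 60 * 1000 };
const cachedToken = { token: "<opaque>", expiresOnTimestamp: Date.now() + 90 * 1000 };

const shouldRefresh =
  cachedToken.expiresOnTimestamp - cyclerDefaults.refreshWindowInMs < Date.now();
const mustRefresh =
  cachedToken.expiresOnTimestamp - cyclerDefaults.forcedRefreshWindowInMs < Date.now();

console.log({ shouldRefresh, mustRefresh }); // { shouldRefresh: true, mustRefresh: false }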
-function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version - }; - const osInfo = { - key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` - }; - return [runtimeInfo, osInfo]; +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; +function ndJsonPolicy() { return { create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - } + return new NdJsonPolicy(nextPolicy, options); + }, }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - sendRequest(request) { - this.addUserAgentHeader(request); + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } return this._nextPolicy.sendRequest(request); } - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } } // Copyright (c) Microsoft Corporation. /** - * Methods that are allowed to follow redirects 301 and 302 + * Stores the patterns specified in NO_PROXY environment variable. 
+ * @internal */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20 -}; -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { super(nextPolicy, options); - this.maxRetries = maxRetries; + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; } sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + return this._nextPolicy.sendRequest(request); } - return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. @@ -56894,7 +58692,7 @@ function rpRegistrationPolicy(retryTimeout = 30) { return { create: (nextPolicy, options) => { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - } + }, }; } class RPRegistrationPolicy extends BaseRequestPolicy { @@ -57039,193 +58837,52 @@ async function getRegistrationStatus(policy, url, originalRequest) { } // Copyright (c) Microsoft Corporation. 
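// Illustrative sketch, not part of the applied patch: the relocated proxy support
// above bypasses the proxy for hosts matching a NO_PROXY entry. A leading-dot
// pattern such as ".foo.com" matches the bare domain and every subdomain, while
// any other pattern must match the host exactly. The helper and hosts below are
// hypothetical and simply mirror the isBypassed logic shown in the diff.
function matchesNoProxy(host, pattern) {
  if (pattern.startsWith(".")) {
    return host.endsWith(pattern) || host === pattern.slice(1);
  }
  return host === pattern;
}

console.log(matchesNoProxy("foo.com", ".foo.com"));         // true
console.log(matchesNoProxy("api.foo.com", ".foo.com"));     // true
console.log(matchesNoProxy("foo.com.evil.io", ".foo.com")); // false
console.log(matchesNoProxy("bar.com", "bar.com"));          // true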
-// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry -}; /** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). 
- */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - } - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - } - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } +function signingPolicy(authenticationProvider) { return { create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - } + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, }; } +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} // Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - } + }, }; } /** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". * @param retryCount - The client retry count. * @param retryInterval - The client retry interval, in milliseconds. * @param minRetryInterval - The minimum retry interval, in milliseconds. @@ -57246,10 +58903,10 @@ class SystemErrorRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); + .catch((error) => retry(this, request, error.response, error)); } } -async function retry$1(policy, request, operationResponse, err, retryData) { +async function retry(policy, request, operationResponse, err, retryData) { retryData = updateRetryData(policy, retryData, err); function shouldPolicyRetry(_response, error) { if (error && @@ -57270,7 +58927,7 @@ async function retry$1(policy, request, operationResponse, err, retryData) { return policy._nextPolicy.sendRequest(request.clone()); } catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); + return retry(policy, request, operationResponse, nestedErr, retryData); } } else { @@ -57282,155 +58939,6 @@ async function retry$1(policy, request, operationResponse, err, retryData) { } } -// Copyright (c) Microsoft Corporation. -(function (QueryCollectionFormat) { - QueryCollectionFormat["Csv"] = ","; - QueryCollectionFormat["Ssv"] = " "; - QueryCollectionFormat["Tsv"] = "\t"; - QueryCollectionFormat["Pipes"] = "|"; - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - } - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? 
schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -57440,15 +58948,28 @@ const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; // Copyright (c) Microsoft Corporation. const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ function throttlingRetryPolicy() { return { create: (nextPolicy, options) => { return new ThrottlingRetryPolicy(nextPolicy, options); - } + }, }; } -const StandardAbortMessage$1 = "The operation was aborted."; +const StandardAbortMessage = "The operation was aborted."; /** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and @@ -57479,10 +59000,10 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { this.numberOfRetries += 1; await delay(delayInMs, undefined, { abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage$1 + abortErrorMsg: StandardAbortMessage, }); if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage$1); + throw new abortController.AbortError(StandardAbortMessage); } if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { return this.sendRequest(httpRequest); @@ -57516,77 +59037,26 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. 
-function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - } - }; -} -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -const DefaultKeepAliveOptions = { - enable: true -}; -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - } - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. const createSpan = coreTracing.createSpanFunction({ packagePrefix: "", - namespace: "" + namespace: "", }); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ function tracingPolicy(tracingOptions = {}) { return { create(nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); - } + }, }; } +/** + * A policy that wraps outgoing requests with a tracing span. + */ class TracingPolicy extends BaseRequestPolicy { constructor(nextPolicy, options, tracingOptions) { super(nextPolicy, options); @@ -57613,14 +59083,13 @@ class TracingPolicy extends BaseRequestPolicy { tryCreateSpan(request) { var _a; try { - const path = URLBuilder.parse(request.url).getPath() || "/"; // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(path, { + const { span } = createSpan(`HTTP ${request.method}`, { tracingOptions: { spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } + tracingContext: request.tracingContext, + }, }); // If the span is not recording, don't do any more work. 
if (!span.isRecording()) { @@ -57634,7 +59103,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttributes({ "http.method": request.method, "http.url": request.url, - requestId: request.requestId + requestId: request.requestId, }); if (this.userAgent) { span.setAttribute("http.user_agent", this.userAgent); @@ -57661,7 +59130,7 @@ class TracingPolicy extends BaseRequestPolicy { try { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: err.message + message: err.message, }); if (err.statusCode) { span.setAttribute("http.status_code", err.statusCode); @@ -57680,7 +59149,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttribute("serviceRequestId", serviceRequestId); } span.setStatus({ - code: coreTracing.SpanStatusCode.OK + code: coreTracing.SpanStatusCode.OK, }); span.end(); } @@ -57690,88 +59159,6 @@ class TracingPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - } - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - } - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - // Copyright (c) Microsoft Corporation. /** * ServiceClient sends service requests and receives responses. 
@@ -57821,7 +59208,7 @@ class ServiceClient { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); } return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - } + }, }; }; authPolicyFactory = wrappedPolicyFactory(); @@ -58056,7 +59443,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op const updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, }; const xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { @@ -58075,13 +59462,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } } @@ -58165,6 +59552,12 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(logPolicy({ logger: logger.info })); return factories; } +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { const requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { @@ -58203,7 +59596,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories, }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -58279,12 +59672,18 @@ function getPropertyFromParameterPath(parent, parameterPath) { } return result; } +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ function flattenResponse(_response, responseSpec) { const parsedHeaders = _response.parsedHeaders; const bodyMapper = responseSpec && responseSpec.bodyMapper; const addOperationResponse = (obj) => { return Object.defineProperty(obj, "_response", { - value: _response + value: _response, }); }; if (bodyMapper) { @@ -58370,9 +59769,16 @@ class ExpiringAccessTokenCache { this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ setCachedToken(accessToken) { this.cachedToken = accessToken; } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ getCachedToken() { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { @@ -58431,6 +59837,9 @@ class AccessTokenRefresher { // Copyright (c) Microsoft Corporation. const HeaderConstants = Constants.HeaderConstants; const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ class BasicAuthenticationCredentials { /** * Creates a new BasicAuthenticationCredentials object. @@ -58440,6 +59849,10 @@ class BasicAuthenticationCredentials { * @param authorizationScheme - The authorization scheme. */ constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -58519,6 +59932,9 @@ class ApiKeyCredentials { } // Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ class TopicCredentials extends ApiKeyCredentials { /** * Creates a new EventGrid TopicCredentials object. @@ -58531,8 +59947,8 @@ class TopicCredentials extends ApiKeyCredentials { } const options = { inHeader: { - "aeg-sas-key": topicKey - } + "aeg-sas-key": topicKey, + }, }; super(options); } @@ -58540,9 +59956,7 @@ class TopicCredentials extends ApiKeyCredentials { Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, - get: function () { - return coreAuth.isTokenCredential; - } + get: function () { return coreAuth.isTokenCredential; } }); exports.AccessTokenRefresher = AccessTokenRefresher; exports.ApiKeyCredentials = ApiKeyCredentials; diff --git a/dist/setup/index.js b/dist/setup/index.js index 19df95db..d05c5006 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -1387,6 +1387,11 @@ function assertDefined(name, value) { return value; } exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -3643,125 +3648,7 @@ exports.parseURL = __webpack_require__(936).parseURL; /* 71 */, /* 72 */, /* 73 */, -/* 74 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Path = void 0; -const path = __importStar(__webpack_require__(622)); -const pathHelper = __importStar(__webpack_require__(972)); -const assert_1 = __importDefault(__webpack_require__(357)); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Helper class for parsing paths into segments - */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); - } - // Remainder is the root - this.segments.unshift(remaining); - } - } - // Array - else { - // Must not be empty - assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } - } - } - } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += path.sep; - } - result += this.segments[i]; - } - return result; - } -} -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map - -/***/ }), +/* 74 */, /* 75 */, /* 76 */, /* 77 */, @@ -4854,7 +4741,19 @@ Object.defineProperty(exports, "__esModule", { value: true }); /***/ }), /* 96 */, -/* 97 */, +/* 97 */ +/***/ (function() { + +"use strict"; + +// Copyright (c) Microsoft 
Corporation. +// Licensed under the MIT license. +if (typeof Symbol === undefined || !Symbol.asyncIterator) { + Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +} +//# sourceMappingURL=index.js.map + +/***/ }), /* 98 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -5488,18 +5387,18 @@ function downloadCache(archiveLocation, archivePath, options) { exports.downloadCache = downloadCache; // Reserve Cache function reserveCache(key, paths, options) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const reserveCacheRequest = { key, - version + version, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; + return response; }); } exports.reserveCache = reserveCache; @@ -8406,7 +8305,7 @@ const pathHelper = __importStar(__webpack_require__(972)); const assert_1 = __importDefault(__webpack_require__(357)); const minimatch_1 = __webpack_require__(93); const internal_match_kind_1 = __webpack_require__(327); -const internal_path_1 = __webpack_require__(74); +const internal_path_1 = __webpack_require__(338); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { @@ -8924,47 +8823,7 @@ exports.isTokenCredential = isTokenCredential; /***/ }), /* 230 */, -/* 231 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Generated by CoffeeScript 1.12.7 -(function() { - var XMLStringWriter, XMLWriterBase, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; - - XMLWriterBase = __webpack_require__(423); - - module.exports = XMLStringWriter = (function(superClass) { - extend(XMLStringWriter, superClass); - - function XMLStringWriter(options) { - XMLStringWriter.__super__.constructor.call(this, options); - } - - XMLStringWriter.prototype.document = function(doc, options) { - var child, i, len, r, ref; - options = this.filterOptions(options); - r = ''; - ref = doc.children; - for (i = 0, len = ref.length; i < len; i++) { - child = ref[i]; - r += this.writeChildNode(child, options, 0); - } - if (options.pretty && r.slice(-options.newline.length) === options.newline) { - r = r.slice(0, -options.newline.length); - } - return r; - }; - - return XMLStringWriter; - - })(XMLWriterBase); - -}).call(this); - - -/***/ }), +/* 231 */, /* 232 */, /* 233 */, /* 234 */, @@ -9008,17 +8867,22 @@ var DiagConsoleLogger = /** @class */ (function () { function DiagConsoleLogger() { function _consoleFunc(funcName) { return function () { - var orgArguments = arguments; + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } if (console) { // Some environments only expose the console when the F12 developer console is 
open + // eslint-disable-next-line no-console var theFunc = console[funcName]; if (typeof theFunc !== 'function') { // Not all environments support all functions + // eslint-disable-next-line no-console theFunc = console.log; } // One last final check if (typeof theFunc === 'function') { - return theFunc.apply(console, orgArguments); + return theFunc.apply(console, args); } } }; @@ -9271,7 +9135,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry return { statusCode: error.statusCode, result: null, - headers: {} + headers: {}, + error }; } else { @@ -13417,7 +13282,7 @@ function expand(str, isTop) { XMLDocumentCB = __webpack_require__(472); - XMLStringWriter = __webpack_require__(231); + XMLStringWriter = __webpack_require__(750); XMLStreamWriter = __webpack_require__(458); @@ -13579,14 +13444,122 @@ function hasLastPage (link) { /***/ }), /* 337 */, /* 338 */ -/***/ (function() { +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -if (typeof Symbol === undefined || !Symbol.asyncIterator) { - Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Path = void 0; +const path = __importStar(__webpack_require__(622)); +const pathHelper = __importStar(__webpack_require__(972)); +const assert_1 = __importDefault(__webpack_require__(357)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } } -//# sourceMappingURL=index.js.map +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map /***/ }), /* 339 */, @@ -14178,6 +14151,29 @@ var events = __webpack_require__(614); var fs = __webpack_require__(747); var util = __webpack_require__(669); +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? 
d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. @@ -15132,10 +15128,10 @@ const BlobItemInternal = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } }, deleted: { @@ -15210,6 +15206,30 @@ const BlobItemInternal = { } } }; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", @@ -15653,10 +15673,10 @@ const BlobPrefix = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } } } @@ -17279,6 +17299,59 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { @@ -19787,6 +19860,13 @@ const BlobCopyFromURLHeaders = { name: "ByteArray" } }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -22304,6 +22384,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, BlobFlatListSegment: BlobFlatListSegment, BlobItemInternal: BlobItemInternal, + BlobName: BlobName, BlobPropertiesInternal: BlobPropertiesInternal, ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, BlobHierarchyListSegment: BlobHierarchyListSegment, @@ -22355,6 +22436,8 @@ var Mappers = /*#__PURE__*/Object.freeze({ ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + 
ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, @@ -22542,7 +22625,7 @@ const timeoutInSeconds = { const version = { parameterPath: "version", mapper: { - defaultValue: "2020-10-02", + defaultValue: "2021-04-10", isConstant: true, serializedName: "x-ms-version", type: { @@ -22637,7 +22720,7 @@ const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted"] + allowedValues: ["metadata", "deleted", "system"] } } } @@ -23159,11 +23242,10 @@ const encryptionKeySha256 = { } }; const encryptionAlgorithm = { - parameterPath: ["options", "encryptionAlgorithm"], + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], mapper: { - defaultValue: "AES256", - isConstant: true, serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", type: { name: "String" } @@ -24080,7 +24162,7 @@ class Service { setProperties(blobServiceProperties, options) { const operationArguments = { blobServiceProperties, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); } @@ -24091,9 +24173,9 @@ class Service { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. 
It is only available on the @@ -24103,7 +24185,7 @@ class Service { */ getStatistics(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); } @@ -24113,7 +24195,7 @@ class Service { */ listContainersSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); } @@ -24126,7 +24208,7 @@ class Service { getUserDelegationKey(keyInfo, options) { const operationArguments = { keyInfo, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); } @@ -24136,9 +24218,9 @@ class Service { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -24153,9 +24235,9 @@ class Service { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -24165,13 +24247,13 @@ class Service { */ filterBlobs(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", @@ -24200,9 +24282,9 @@ const setPropertiesOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getPropertiesOperationSpec = { +const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -24227,7 +24309,7 @@ const getPropertiesOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getStatisticsOperationSpec = { path: "/", @@ -24254,7 +24336,7 @@ const getStatisticsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const 
listContainersSegmentOperationSpec = { path: "/", @@ -24284,7 +24366,7 @@ const listContainersSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -24315,9 +24397,9 @@ const getUserDelegationKeyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getAccountInfoOperationSpec = { +const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -24333,9 +24415,9 @@ const getAccountInfoOperationSpec = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const submitBatchOperationSpec = { +const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { @@ -24365,9 +24447,9 @@ const submitBatchOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const filterBlobsOperationSpec = { +const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { @@ -24394,7 +24476,7 @@ const filterBlobsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; /* @@ -24420,9 +24502,9 @@ class Container { */ create(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -24431,7 +24513,7 @@ class Container { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); } @@ -24442,9 +24524,9 @@ class Container { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. @@ -24452,9 +24534,9 @@ class Container { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. 
The permissions indicate whether container data @@ -24463,7 +24545,7 @@ class Container { */ getAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); } @@ -24474,7 +24556,7 @@ class Container { */ setAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); } @@ -24484,7 +24566,7 @@ class Container { */ restore(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); } @@ -24496,7 +24578,7 @@ class Container { rename(sourceContainerName, options) { const operationArguments = { sourceContainerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); } @@ -24513,9 +24595,20 @@ class Container { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -24524,9 +24617,9 @@ class Container { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -24537,9 +24630,9 @@ class Container { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -24550,9 +24643,9 @@ class Container { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -24561,9 +24654,9 @@ class Container { */ breakLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -24578,9 +24671,9 @@ class Container { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -24588,7 +24681,7 @@ class Container { */ listBlobFlatSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); } @@ -24603,7 +24696,7 @@ class Container { listBlobHierarchySegment(delimiter, options) { const operationArguments = { delimiter, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); } @@ -24613,14 +24706,14 @@ class Container { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -24644,7 +24737,7 @@ const createOperationSpec = { preventEncryptionScopeOverride ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", @@ -24667,9 +24760,9 @@ const getPropertiesOperationSpec$1 = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const deleteOperationSpec = { +const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: 
"DELETE", responses: { @@ -24692,9 +24785,9 @@ const deleteOperationSpec = { ifUnmodifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const setMetadataOperationSpec = { +const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -24721,7 +24814,7 @@ const setMetadataOperationSpec = { ifModifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -24760,7 +24853,7 @@ const getAccessPolicyOperationSpec = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const setAccessPolicyOperationSpec = { path: "/{containerName}", @@ -24794,7 +24887,7 @@ const setAccessPolicyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const restoreOperationSpec = { path: "/{containerName}", @@ -24822,7 +24915,7 @@ const restoreOperationSpec = { deletedContainerVersion ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const renameOperationSpec = { path: "/{containerName}", @@ -24850,9 +24943,9 @@ const renameOperationSpec = { sourceLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const submitBatchOperationSpec$1 = { +const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -24886,9 +24979,39 @@ const submitBatchOperationSpec$1 = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const acquireLeaseOperationSpec = { +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -24917,9 +25040,9 @@ const acquireLeaseOperationSpec = { proposedLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const releaseLeaseOperationSpec = { +const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -24947,9 +25070,9 @@ const releaseLeaseOperationSpec = { leaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const renewLeaseOperationSpec = { +const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -24977,9 +25100,9 @@ const renewLeaseOperationSpec = { action2 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const breakLeaseOperationSpec = { +const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -25007,9 +25130,9 @@ const breakLeaseOperationSpec = { breakPeriod ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const changeLeaseOperationSpec = { +const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -25038,7 +25161,7 @@ const changeLeaseOperationSpec = { 
proposedLeaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -25069,7 +25192,7 @@ const listBlobFlatSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -25101,7 +25224,7 @@ const listBlobHierarchySegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", @@ -25119,7 +25242,7 @@ const getAccountInfoOperationSpec$1 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; /* @@ -25145,7 +25268,7 @@ class Blob$1 { */ download(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); } @@ -25156,9 +25279,9 @@ class Blob$1 { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -25177,9 +25300,9 @@ class Blob$1 { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted @@ -25187,7 +25310,7 @@ class Blob$1 { */ undelete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); } @@ -25199,7 +25322,7 @@ class Blob$1 { setExpiry(expiryOptions, options) { const operationArguments = { expiryOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); } @@ -25209,7 +25332,7 @@ class Blob$1 { */ setHttpHeaders(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); } @@ -25219,7 +25342,7 @@ class Blob$1 { */ setImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, 
setImmutabilityPolicyOperationSpec); } @@ -25229,7 +25352,7 @@ class Blob$1 { */ deleteImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); } @@ -25241,7 +25364,7 @@ class Blob$1 { setLegalHold(legalHold, options) { const operationArguments = { legalHold, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); } @@ -25252,9 +25375,9 @@ class Blob$1 { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -25263,9 +25386,9 @@ class Blob$1 { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -25276,9 +25399,9 @@ class Blob$1 { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -25289,9 +25412,9 @@ class Blob$1 { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -25306,9 +25429,9 @@ class Blob$1 { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -25317,9 +25440,9 @@ class Blob$1 { */ breakLease(options) { const 
operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob @@ -25327,7 +25450,7 @@ class Blob$1 { */ createSnapshot(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } @@ -25342,7 +25465,7 @@ class Blob$1 { startCopyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } @@ -25358,7 +25481,7 @@ class Blob$1 { copyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); } @@ -25372,7 +25495,7 @@ class Blob$1 { abortCopyFromURL(copyId, options) { const operationArguments = { copyId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); } @@ -25388,7 +25511,7 @@ class Blob$1 { setTier(tier, options) { const operationArguments = { tier, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); } @@ -25398,9 +25521,9 @@ class Blob$1 { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -25409,7 +25532,7 @@ class Blob$1 { */ query(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } @@ -25419,7 +25542,7 @@ class Blob$1 { */ getTags(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); } @@ -25429,13 +25552,13 @@ class Blob$1 { */ setTags(options) { const operationArguments = { - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -25483,9 +25606,9 @@ const downloadOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getPropertiesOperationSpec$2 = { +const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -25518,9 +25641,9 @@ const getPropertiesOperationSpec$2 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const deleteOperationSpec$1 = { +const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -25552,7 +25675,7 @@ const deleteOperationSpec$1 = { deleteSnapshots ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", @@ -25574,7 +25697,7 @@ const undeleteOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", @@ -25598,7 +25721,7 @@ const setExpiryOperationSpec = { expiresOn ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", @@ -25632,7 +25755,7 @@ const setHttpHeadersOperationSpec = { blobContentDisposition ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -25657,7 +25780,7 @@ const setImmutabilityPolicyOperationSpec = { immutabilityPolicyMode ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -25679,7 +25802,7 @@ const deleteImmutabilityPolicyOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", @@ -25702,9 +25825,9 @@ const setLegalHoldOperationSpec = { legalHold ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const setMetadataOperationSpec$1 = { +const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25735,9 +25858,9 @@ const setMetadataOperationSpec$1 = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const acquireLeaseOperationSpec$1 = { +const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25765,9 +25888,9 @@ const acquireLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const releaseLeaseOperationSpec$1 = { +const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25794,9 +25917,9 @@ const releaseLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const renewLeaseOperationSpec$1 = { +const renewLeaseOperationSpec = { path: 
"/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25823,9 +25946,9 @@ const renewLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const changeLeaseOperationSpec$1 = { +const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25853,9 +25976,9 @@ const changeLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const breakLeaseOperationSpec$1 = { +const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -25882,7 +26005,7 @@ const breakLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", @@ -25915,7 +26038,7 @@ const createSnapshotOperationSpec = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -25957,7 +26080,7 @@ const startCopyFromURLOperationSpec = { legalHold1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -25986,6 +26109,7 @@ const copyFromURLOperationSpec = { ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, + encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, @@ -25999,7 +26123,7 @@ const copyFromURLOperationSpec = { copySourceAuthorization ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -26027,7 +26151,7 @@ const abortCopyFromURLOperationSpec = { copyActionAbortConstant ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTierOperationSpec = { path: "/{containerName}/{blob}", @@ -26061,9 +26185,9 @@ const setTierOperationSpec = { tier1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getAccountInfoOperationSpec$2 = { +const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -26079,7 +26203,7 @@ const getAccountInfoOperationSpec$2 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -26129,7 +26253,7 @@ const queryOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -26159,7 +26283,7 @@ const getTagsOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -26193,7 +26317,7 @@ const setTagsOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; /* @@ -26223,7 +26347,7 @@ class PageBlob { const operationArguments = { contentLength, blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); } @@ -26237,7 +26361,7 @@ class PageBlob { 
const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); } @@ -26249,7 +26373,7 @@ class PageBlob { clearPages(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); } @@ -26270,7 +26394,7 @@ class PageBlob { sourceRange, contentLength, range, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); } @@ -26281,7 +26405,7 @@ class PageBlob { */ getPageRanges(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); } @@ -26292,7 +26416,7 @@ class PageBlob { */ getPageRangesDiff(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); } @@ -26305,7 +26429,7 @@ class PageBlob { resize(blobContentLength, options) { const operationArguments = { blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); } @@ -26319,7 +26443,7 @@ class PageBlob { updateSequenceNumber(sequenceNumberAction, options) { const operationArguments = { sequenceNumberAction, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); } @@ -26338,14 +26462,14 @@ class PageBlob { copyIncremental(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -26392,7 +26516,7 @@ const createOperationSpec$1 = { blobSequenceNumber ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -26434,7 +26558,7 @@ const 
uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer$2 }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -26472,7 +26596,7 @@ const clearPagesOperationSpec = { pageWrite1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -26519,7 +26643,7 @@ const uploadPagesFromURLOperationSpec = { range1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -26553,7 +26677,7 @@ const getPageRangesOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -26589,7 +26713,7 @@ const getPageRangesDiffOperationSpec = { prevSnapshotUrl ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const resizeOperationSpec = { path: "/{containerName}/{blob}", @@ -26622,7 +26746,7 @@ const resizeOperationSpec = { blobContentLength ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", @@ -26652,7 +26776,7 @@ const updateSequenceNumberOperationSpec = { sequenceNumberAction ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", @@ -26680,7 +26804,7 @@ const copyIncrementalOperationSpec = { copySource ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; /* @@ -26707,9 +26831,9 @@ class AppendBlob { create(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. 
The @@ -26723,7 +26847,7 @@ class AppendBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); } @@ -26740,7 +26864,7 @@ class AppendBlob { const operationArguments = { sourceUrl, contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); } @@ -26751,15 +26875,15 @@ class AppendBlob { */ seal(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -26802,7 +26926,7 @@ const createOperationSpec$2 = { blobType1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -26886,7 +27010,7 @@ const appendBlockFromUrlOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const sealOperationSpec = { path: "/{containerName}/{blob}", @@ -26914,7 +27038,7 @@ const sealOperationSpec = { appendPosition ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; /* @@ -26946,7 +27070,7 @@ class BlockBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); } @@ -26967,7 +27091,7 @@ class BlockBlob { const operationArguments = { contentLength, copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); } @@ -26985,7 +27109,7 @@ class BlockBlob { blockId, contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); } @@ -27004,7 +27128,7 @@ class BlockBlob { blockId, contentLength, sourceUrl, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); } @@ -27022,7 +27146,7 @@ class BlockBlob { commitBlockList(blocks, options) { const operationArguments = { blocks, - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); } @@ -27036,14 +27160,14 @@ class BlockBlob { getBlockList(listType, options) { const operationArguments = { listType, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -27091,7 +27215,7 @@ const uploadOperationSpec = { blobType2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", @@ -27144,7 +27268,7 @@ const putBlobFromUrlOperationSpec = { copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -27180,7 +27304,7 @@ const stageBlockOperationSpec = { accept2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -27221,7 +27345,7 @@ const stageBlockFromURLOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -27271,7 +27395,7 @@ const commitBlockListOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -27301,7 +27425,7 @@ const getBlockListOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; // Copyright (c) Microsoft Corporation. @@ -27312,8 +27436,8 @@ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-const SDK_VERSION = "12.8.0"; -const SERVICE_VERSION = "2020-10-02"; +const SDK_VERSION = "12.9.0"; +const SERVICE_VERSION = "2021-04-10"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; @@ -27330,15 +27454,15 @@ const URLConstants = { SIGNATURE: "sig", SNAPSHOT: "snapshot", VERSIONID: "versionid", - TIMEOUT: "timeout" - } + TIMEOUT: "timeout", + }, }; const HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 + HTTP_RANGE_NOT_SATISFIABLE: 416, }; const HeaderConstants = { AUTHORIZATION: "Authorization", @@ -27363,7 +27487,7 @@ const HeaderConstants = { X_MS_COPY_SOURCE: "x-ms-copy-source", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" + X_MS_VERSION: "x-ms-version", }; const ETagNone = ""; const ETagAny = "*"; @@ -27468,7 +27592,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-if-tags", - "x-ms-source-if-tags" + "x-ms-source-if-tags", ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", @@ -27503,8 +27627,9 @@ const StorageBlobLoggingAllowedQueryParameters = [ "skt", "sktid", "skv", - "snapshot" + "snapshot", ]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; // Copyright (c) Microsoft Corporation. /** @@ -27644,7 +27769,7 @@ function extractConnectionStringParts(connectionString) { url: blobEndpoint, accountName, accountKey, - proxyUri + proxyUri, }; } else { @@ -27976,14 +28101,14 @@ function toBlobTags(tags) { return undefined; } const res = { - blobTagSet: [] + blobTagSet: [], }; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; res.blobTagSet.push({ key, - value + value, }); } } @@ -28023,33 +28148,33 @@ function toQuerySerialization(textConfiguration) { fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } + headersPresent: textConfiguration.hasHeaders || false, + }, + }, }; case "json": return { format: { type: "json", jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } + recordSeparator: textConfiguration.recordSeparator, + }, + }, }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { - schema: textConfiguration.schema - } - } + schema: textConfiguration.schema, + }, + }, }; case "parquet": return { format: { - type: "parquet" - } + type: "parquet", + }, }; default: throw Error("Invalid BlobQueryTextConfiguration."); @@ -28073,7 +28198,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { } const rule = { ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] + replicationStatus: objectReplicationRecord[key], }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) { @@ -28082,7 +28207,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { else { orProperties.push({ policyId: ids[0], - rules: [rule] + rules: [rule], }); } } @@ -28101,6 +28226,202 @@ function attachCredential(thing, credential) { function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? 
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; } +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobItem(blobInXML) { + const blobPropertiesInXML = blobInXML["Properties"]; + const blobProperties = { + createdOn: new Date(blobPropertiesInXML["Creation-Time"]), + lastModified: new Date(blobPropertiesInXML["Last-Modified"]), + etag: blobPropertiesInXML["Etag"], + contentLength: blobPropertiesInXML["Content-Length"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["Content-Length"]), + contentType: blobPropertiesInXML["Content-Type"], + contentEncoding: blobPropertiesInXML["Content-Encoding"], + contentLanguage: blobPropertiesInXML["Content-Language"], + contentMD5: decodeBase64String(blobPropertiesInXML["Content-MD5"]), + contentDisposition: blobPropertiesInXML["Content-Disposition"], + cacheControl: blobPropertiesInXML["Cache-Control"], + blobSequenceNumber: blobPropertiesInXML["x-ms-blob-sequence-number"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]), + blobType: blobPropertiesInXML["BlobType"], + leaseStatus: blobPropertiesInXML["LeaseStatus"], + leaseState: blobPropertiesInXML["LeaseState"], + leaseDuration: blobPropertiesInXML["LeaseDuration"], + copyId: blobPropertiesInXML["CopyId"], + copyStatus: blobPropertiesInXML["CopyStatus"], + copySource: blobPropertiesInXML["CopySource"], + copyProgress: blobPropertiesInXML["CopyProgress"], + copyCompletedOn: blobPropertiesInXML["CopyCompletionTime"] === undefined + ? 
undefined + : new Date(blobPropertiesInXML["CopyCompletionTime"]), + copyStatusDescription: blobPropertiesInXML["CopyStatusDescription"], + serverEncrypted: ParseBoolean(blobPropertiesInXML["ServerEncrypted"]), + incrementalCopy: ParseBoolean(blobPropertiesInXML["IncrementalCopy"]), + destinationSnapshot: blobPropertiesInXML["DestinationSnapshot"], + deletedOn: blobPropertiesInXML["DeletedTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["DeletedTime"]), + remainingRetentionDays: blobPropertiesInXML["RemainingRetentionDays"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["RemainingRetentionDays"]), + accessTier: blobPropertiesInXML["AccessTier"], + accessTierInferred: ParseBoolean(blobPropertiesInXML["AccessTierInferred"]), + archiveStatus: blobPropertiesInXML["ArchiveStatus"], + customerProvidedKeySha256: blobPropertiesInXML["CustomerProvidedKeySha256"], + encryptionScope: blobPropertiesInXML["EncryptionScope"], + accessTierChangedOn: blobPropertiesInXML["AccessTierChangeTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["AccessTierChangeTime"]), + tagCount: blobPropertiesInXML["TagCount"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["TagCount"]), + expiresOn: blobPropertiesInXML["Expiry-Time"] === undefined + ? undefined + : new Date(blobPropertiesInXML["Expiry-Time"]), + isSealed: ParseBoolean(blobPropertiesInXML["Sealed"]), + rehydratePriority: blobPropertiesInXML["RehydratePriority"], + lastAccessedOn: blobPropertiesInXML["LastAccessTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["LastAccessTime"]), + immutabilityPolicyExpiresOn: blobPropertiesInXML["ImmutabilityPolicyUntilDate"] === undefined + ? undefined + : new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]), + immutabilityPolicyMode: blobPropertiesInXML["ImmutabilityPolicyMode"], + legalHold: ParseBoolean(blobPropertiesInXML["LegalHold"]), + }; + return { + name: ParseBlobName(blobInXML["Name"]), + deleted: ParseBoolean(blobInXML["Deleted"]), + snapshot: blobInXML["Snapshot"], + versionId: blobInXML["VersionId"], + isCurrentVersion: ParseBoolean(blobInXML["IsCurrentVersion"]), + properties: blobProperties, + metadata: blobInXML["Metadata"], + blobTags: ParseBlobTags(blobInXML["Tags"]), + objectReplicationMetadata: blobInXML["OrMetadata"], + hasVersionsOnly: ParseBoolean(blobInXML["HasVersionsOnly"]), + }; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML instanceof 
Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} // Copyright (c) Microsoft Corporation. /** @@ -28131,9 +28452,16 @@ class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { return this._nextPolicy.sendRequest(request); } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } } @@ -28154,6 +28482,10 @@ class StorageBrowserPolicyFactory { } // Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. @@ -28171,7 +28503,7 @@ const DEFAULT_RETRY_OPTIONS = { retryDelayInMs: 4 * 1000, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", - tryTimeoutInMs: undefined // Use server side default timeout strategy + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** @@ -28208,7 +28540,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, }; } /** @@ -28285,7 +28617,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "ENOTFOUND", "TIMEOUT", "EPIPE", - "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js ]; if (err) { for (const retriableError of retriableErrors) { @@ -28471,7 +28803,7 @@ class TelemetryPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { if (!request.headers) { request.headers = new coreHttp.HttpHeaders(); } @@ -28494,7 +28826,7 @@ class TelemetryPolicyFactory { */ constructor(telemetry) { const userAgentInfo = []; - { + if (coreHttp.isNode) { if (telemetry) { const telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { @@ -28507,7 +28839,7 @@ class TelemetryPolicyFactory { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + const runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -28531,6 +28863,247 @@ function getCachedDefaultHttpClient() { return _defaultHttpClient; } +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. 
+ */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + // Copyright (c) Microsoft Corporation. /** * A helper to decide if a given argument satisfies the Pipeline contract @@ -28576,7 +29149,7 @@ class Pipeline { toServiceClientOptions() { return { httpClient: this.options.httpClient, - requestPolicyFactories: this.factories + requestPolicyFactories: this.factories, }; } } @@ -28588,6 +29161,7 @@ class Pipeline { * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { + var _a; if (credential === undefined) { credential = new AnonymousCredential(); } @@ -28609,16 +29183,16 @@ function newPipeline(credential, pipelineOptions = {}) { coreHttp.logPolicy({ logger: logger.info, allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters - }) + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), ]; - { + if (coreHttp.isNode) { // policies only available in Node.js runtime, not in browsers factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); factories.push(coreHttp.disableResponseDecompressionPolicy()); } factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) : credential); return new Pipeline(factories, pipelineOptions); } @@ -28645,7 +29219,9 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && typeof request.body === "string" && request.body.length > 0) { + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ @@ -28660,7 +29236,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + @@ -28789,9 +29365,7 @@ class StorageSharedKeyCredential extends Credential { * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } @@ -28803,8 +29377,8 @@ class StorageSharedKeyCredential extends Credential { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const packageName = "azure-storage-blob"; -const packageVersion = "12.8.0"; -class StorageClientContext extends coreHttp.ServiceClient { +const packageVersion = "12.9.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the target of the desired @@ -28820,7 +29394,7 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } super(undefined, options); @@ -28829,7 +29403,7 @@ class StorageClientContext extends coreHttp.ServiceClient { // Parameter assignments this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-10-02"; + this.version = options.version || "2021-04-10"; } } @@ -28876,7 +29450,7 @@ class StorageClient { */ const createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage" + namespace: "Microsoft.Storage", }); /** * @internal @@ -28890,7 +29464,7 @@ function convertTracingToRequestOptionsBase(options) { return { // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, }; } @@ -28947,6 +29521,10 @@ class BlobSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an @@ -28988,6 +29566,9 @@ class BlobSASPermissions { case "i": blobSASPermissions.setImmutabilityPolicy = true; break; + case "y": + blobSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission: ${char}`); } @@ -29032,6 +29613,9 @@ class BlobSASPermissions { if (permissionLike.setImmutabilityPolicy) { blobSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } return blobSASPermissions; } /** @@ -29072,6 +29656,9 @@ class BlobSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -29131,6 +29718,14 @@ class ContainerSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; } /** * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an @@ -29175,6 +29770,12 @@ class ContainerSASPermissions { case "i": containerSASPermissions.setImmutabilityPolicy = true; break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; default: throw new RangeError(`Invalid permission ${char}`); } @@ -29222,6 +29823,12 @@ class ContainerSASPermissions { if (permissionLike.setImmutabilityPolicy) { containerSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } return containerSASPermissions; } /** @@ -29267,6 +29874,12 @@ class ContainerSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } return permissions.join(""); } } @@ -29296,9 +29909,7 @@ class UserDelegationKeyCredential { */ computeHMACSHA256(stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } } @@ -29316,6 +29927,10 @@ function ipRangeToString(ipRange) { } // Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; (function (SASProtocol) { /** * Protocol that allows HTTPS only @@ -29336,7 +29951,7 @@ function ipRangeToString(ipRange) { * NOTE: Instances of this class are immutable. 
*/ class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -29349,6 +29964,7 @@ class SASQueryParameters { this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; @@ -29374,6 +29990,7 @@ class SASQueryParameters { this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; @@ -29402,7 +30019,7 @@ class SASQueryParameters { if (this.ipRangeInner) { return { end: this.ipRangeInner.end, - start: this.ipRangeInner.start + start: this.ipRangeInner.start, }; } return undefined; @@ -29421,6 +30038,7 @@ class SASQueryParameters { "se", "sip", "si", + "ses", "skoid", "sktid", "skt", @@ -29436,7 +30054,7 @@ class SASQueryParameters { "rscl", "rsct", "saoid", - "scid" + "scid", ]; const queries = []; for (const param of params) { @@ -29465,6 +30083,9 @@ class SASQueryParameters { case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; case "skoid": // Signed object ID this.tryAppendQueryParameter(queries, param, this.signedOid); break; @@ -29549,6 +30170,15 @@ function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredent if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } // Version 2019-12-12 adds support for the blob tags permission. // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string @@ -29630,7 +30260,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? 
blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); @@ -29699,11 +30329,81 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. 
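A minimal sketch of how the 2020-12-06 code path added in the hunk above could be exercised through the public @azure/storage-blob API; the account name, key, container/blob names and the "myScope" encryption scope below are placeholders, not values taken from this patch:

const {
  BlobSASPermissions,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
} = require("@azure/storage-blob");

// Shared-key credential for the storage account (placeholder values).
const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");

// Passing encryptionScope routes it into the string-to-sign and the "ses"
// query parameter, which requires service version 2020-12-06 or newer.
const sasToken = generateBlobSASQueryParameters(
  {
    containerName: "mycontainer",
    blobName: "myblob.txt",
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000), // valid for one hour
    encryptionScope: "myScope",
  },
  credential
).toString();

console.log(sasToken);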
@@ -29775,7 +30475,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); @@ -29854,11 +30554,91 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" @@ -29892,6 +30672,11 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -29902,10 +30687,18 @@ function 
SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } blobSASSignatureValues.version = version; return blobSASSignatureValues; } @@ -29979,7 +30772,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30014,7 +30807,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30047,7 +30840,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30079,7 +30872,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30114,7 +30907,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -30194,8 +30987,7 @@ class RetriableReadableStream extends stream.Readable { }); } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } else { @@ -31040,7 +31832,7 @@ class AvroReader { } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); if (!arraysEqual(header, AVRO_INIT_BYTES)) { throw new Error("Stream is not an Avro file."); @@ -31048,7 +31840,7 @@ class AvroReader { // File metadata is written as if defined by the following map schema: // { "type": "map", "values": "bytes"} this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Validate codec const codec = this._metadata[AVRO_CODEC_KEY]; @@ -31057,7 +31849,7 @@ class AvroReader { } // The 16-byte, randomly-generated sync marker for this file. 
this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Parse the schema const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); @@ -31066,7 +31858,7 @@ class AvroReader { this._blockOffset = this._initialBlockOffset + this._dataStream.position; } this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // skip block length await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); @@ -31088,13 +31880,13 @@ class AvroReader { } while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock == 0) { const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; @@ -31103,7 +31895,7 @@ class AvroReader { } try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); } catch (err) { @@ -31302,7 +32094,7 @@ class BlobQuickQueryStream extends stream.Readable { position, name, isFatal: fatal, - description + description, }); } break; @@ -31678,6 +32470,11 @@ class BlobQueryResponse { } // Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; (function (BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. @@ -31693,6 +32490,12 @@ class BlobQueryResponse { */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; (function (PremiumPageBlobTier) { /** * P4 Tier. @@ -31753,6 +32556,20 @@ function ensureCpkIfSpecified(cpk, isHttps) { cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
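A minimal sketch of the re-exported BlockBlobTier enum from the hunk above in use; the connection string, container and blob names are placeholders:

const { BlockBlobClient, BlockBlobTier } = require("@azure/storage-blob");

async function uploadThenArchive(connectionString) {
  const client = new BlockBlobClient(connectionString, "mycontainer", "hello.txt");
  // Upload directly into the Cool tier...
  await client.uploadData(Buffer.from("hello"), { tier: BlockBlobTier.Cool });
  // ...and move the blob to Archive later.
  await client.setAccessTier(BlockBlobTier.Archive);
}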
@@ -31765,16 +32582,16 @@ function ensureCpkIfSpecified(cpk, isHttps) { function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, - clearRange + clearRange, } }) }); } @@ -31787,7 +32604,7 @@ function rangeResponseFromModel(response) { */ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; @@ -31823,7 +32640,7 @@ const cancel = async function cancel(options = {}) { } // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); @@ -31901,7 +32718,7 @@ function makeBlobBeginCopyFromURLPollOperation(state) { state: Object.assign({}, state), cancel, toString, - update + update, }; } @@ -32543,7 +33360,7 @@ async function streamToBuffer2(stream, buffer, encoding) { */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); + const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -32559,8 +33376,8 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). 
*/ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, @@ -32603,12 +33420,17 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -32625,10 +33447,8 @@ class BlobClient extends StorageClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); this.blobContext = new Blob$1(this.storageClientContext); this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); @@ -32753,11 +33573,13 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-download", options); try { const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately - if (false) {} + if (!coreHttp.isNode) { + return wrappedRes; + } // We support retrying when download stream unexpected ends in Node.js runtime // Following code shouldn't be bundled into browser build, however some // bundlers may try to bundle following code and "FileReadResponse.ts". 
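A minimal sketch of the Node.js download path wired up above, where the body is wrapped in a RetriableReadableStream that re-requests the remaining range up to maxRetryRequests times; the connection string and names are placeholders:

const { BlobClient } = require("@azure/storage-blob");

async function downloadToBuffer(connectionString) {
  const blobClient = new BlobClient(connectionString, "mycontainer", "myblob.txt");
  // In Node.js the body comes back as a retriable readable stream.
  const response = await blobClient.download(0, undefined, { maxRetryRequests: 3 });
  const chunks = [];
  for await (const chunk of response.readableStreamBody) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks);
}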
@@ -32782,16 +33604,16 @@ class BlobClient extends StorageClient { ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, }, range: rangeToString({ count: offset + res.contentLength - start, - offset: start + offset: start, }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey + cpkInfo: options.customerProvidedKey, }; // Debug purpose only // console.log( @@ -32802,13 +33624,13 @@ class BlobClient extends StorageClient { return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress + onProgress: options.onProgress, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32833,21 +33655,23 @@ class BlobClient extends StorageClient { abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); + // Expected exception when checking blob existence return false; } + else if (e.statusCode === 409 && + e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg) { + // Expected exception when checking blob existence + return true; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32879,7 +33703,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32906,7 +33730,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32928,20 +33752,19 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." + message: "Expected exception when deleting a blob or snapshot only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32965,7 +33788,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -32999,7 +33822,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33029,7 +33852,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33055,7 +33878,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33079,7 +33902,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33113,7 +33936,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33197,7 +34020,7 @@ class BlobClient extends StorageClient { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) + startCopyFromURL: (...args) => this.startCopyFromURL(...args), }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, @@ -33205,7 +34028,7 @@ class BlobClient extends StorageClient { intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + startCopyFromURLOptions: options, }); // Trigger the startCopyFromURL call by calling poll. // Any errors from this method should be surfaced to the user. @@ -33228,7 +34051,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33254,13 +34077,13 @@ class BlobClient extends StorageClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold }, convertTracingToRequestOptionsBase(updatedOptions))); + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33288,7 +34111,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33365,7 +34188,7 @@ class BlobClient extends StorageClient { conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -33384,7 +34207,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33422,7 +34245,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33503,13 +34326,13 @@ class BlobClient extends StorageClient { sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions + sourceIfTags: options.sourceConditions.tagConditions, }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33550,7 +34373,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33571,7 +34394,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33592,7 +34415,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33643,12 +34466,17 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -33703,7 +34531,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33724,20 +34552,19 @@ class AppendBlobClient extends BlobClient { const conditions = { ifNoneMatch: ETagAny }; try { const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33760,7 +34587,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33799,13 +34626,13 @@ class AppendBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33838,13 +34665,13 @@ class AppendBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -33895,12 +34722,17 @@ class BlockBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -33966,23 +34798,25 @@ class BlockBlobClient extends BlobClient { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); try { - if (false) {} + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: 
toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + outputSerialization: toQuerySerialization(options.outputTextConfiguration), }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, - onError: options.onError + onError: options.onError, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34024,13 +34858,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34067,13 +34901,13 @@ class BlockBlobClient extends BlobClient { sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34097,13 +34931,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34141,7 +34975,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34172,7 +35006,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34206,7 +35040,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34233,7 +35067,7 @@ class BlockBlobClient extends BlobClient { async uploadData(data, options = {}) { const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); try { - if (true) { + if (coreHttp.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; @@ -34247,12 +35081,15 @@ class BlockBlobClient extends BlobClient { } return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } - else {} + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34288,7 +35125,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34301,7 +35138,7 @@ class BlockBlobClient extends BlobClient { * Uploads data to block blob. Requires a bodyFactory as the data source, * which need to return a {@link HttpRequestBody} object with the offset and size provided. * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. 
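A minimal sketch of uploadData with the single-shot and block thresholds described in the doc comment above; the connection string, names and sizes are placeholders:

const { BlockBlobClient } = require("@azure/storage-blob");

async function uploadWithBlocks(connectionString, data) {
  const client = new BlockBlobClient(connectionString, "mycontainer", "big.bin");
  return client.uploadData(data, {
    maxSingleShotSize: 8 * 1024 * 1024, // at or below this size: a single upload() call
    blockSize: 4 * 1024 * 1024,         // above it: stageBlock() chunks of this size
    concurrency: 4,                     // parallel block uploads
    onProgress: (progress) => console.log(`uploaded ${progress.loadedBytes} bytes`),
  });
}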
@@ -34367,14 +35204,14 @@ class BlockBlobClient extends BlobClient { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying // TODO: Hook with convenience layer progress event in finer level transferProgress += contentLength; if (options.onProgress) { options.onProgress({ - loadedBytes: transferProgress + loadedBytes: transferProgress, }); } }); @@ -34385,7 +35222,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34414,14 +35251,14 @@ class BlockBlobClient extends BlobClient { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, - start: offset + start: offset, }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34465,7 +35302,7 @@ class BlockBlobClient extends BlobClient { await this.stageBlock(blockID, body, length, { conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying transferProgress += length; @@ -34484,7 +35321,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34535,12 +35372,17 @@ class PageBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -34590,7 +35432,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34613,20 +35455,19 @@ class PageBlobClient extends BlobClient { try { const conditions = { ifNoneMatch: ETagAny }; const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) 
=== null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34651,13 +35492,13 @@ class PageBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34687,13 +35528,13 @@ class PageBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34720,7 +35561,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34749,7 +35590,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34779,7 +35620,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34809,7 +35650,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34835,7 +35676,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34862,7 +35703,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34892,7 +35733,7 @@ class 
PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35040,7 +35881,7 @@ class BatchResponseParser { return { subResponses: deserializedSubResponses, subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount + subResponsesFailedCount: subResponsesFailedCount, }; } } @@ -35187,7 +36028,7 @@ class BlobBatch { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); @@ -35195,7 +36036,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35234,7 +36075,7 @@ class BlobBatch { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); @@ -35242,7 +36083,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35299,7 +36140,7 @@ class InnerBatchRequest { this.subRequestPrefix, `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); for (const header of request.headers.headersArray()) { this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; @@ -35339,7 +36180,7 @@ class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, - headers: new coreHttp.HttpHeaders() + headers: new coreHttp.HttpHeaders(), }; this.batchRequest = batchRequest; } @@ -35506,14 +36347,14 @@ class BlobBatchClient { version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount + subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35559,12 +36400,17 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if 
(extractedCreds.kind === "SASConnString") { url = @@ -35616,7 +36462,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35636,20 +36482,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." + message: "Expected exception when creating a container only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35671,7 +36516,7 @@ class ContainerClient extends StorageClient { try { await this.getProperties({ abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } @@ -35679,13 +36524,13 @@ class ContainerClient extends StorageClient { if (e.statusCode === 404) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" + message: "Expected exception when checking container existence", }); return false; } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35759,7 +36604,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35785,7 +36630,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35805,20 +36650,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." + message: "Expected exception when deleting a container only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35852,7 +36696,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35888,13 +36732,13 @@ class ContainerClient extends StorageClient { requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], - version: response.version + version: response.version, }; for (const identifier of response) { let accessPolicy = undefined; if (identifier.accessPolicy) { accessPolicy = { - permissions: identifier.accessPolicy.permissions + permissions: identifier.accessPolicy.permissions, }; if (identifier.accessPolicy.expiresOn) { accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); @@ -35905,7 +36749,7 @@ class ContainerClient extends StorageClient { } res.signedIdentifiers.push({ accessPolicy, - id: identifier.id + id: identifier.id, }); } return res; @@ -35913,7 +36757,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35952,9 +36796,9 @@ class ContainerClient extends StorageClient { permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" + : "", }, - id: identifier.id + id: identifier.id, }); } return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); @@ -35962,7 +36806,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36008,13 +36852,13 @@ class ContainerClient extends StorageClient { const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, - response + response, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36045,7 +36889,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36067,8 +36911,12 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const 
wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; @@ -36076,7 +36924,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36096,19 +36944,33 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; }) }) }); return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36290,7 +37152,7 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } + }, }; } /** @@ -36364,7 +37226,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` @@ -36379,7 +37241,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } @@ -36397,7 +37259,7 @@ class ContainerClient extends StorageClient { * } * } * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -36408,7 +37270,9 @@ class ContainerClient extends StorageClient { * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; - * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) { + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * @@ -36419,7 +37283,7 @@ class ContainerClient extends StorageClient { * } * * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -36486,7 +37350,208 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, }; } getContainerNameFromUrl() { @@ -36619,6 +37684,10 @@ class AccountSASPermissions { * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. @@ -36665,6 +37734,9 @@ class AccountSASPermissions { case "i": accountSASPermissions.setImmutabilityPolicy = true; break; + case "y": + accountSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission character: ${c}`); } @@ -36715,6 +37787,9 @@ class AccountSASPermissions { if (permissionLike.setImmutabilityPolicy) { accountSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } return accountSASPermissions; } /** @@ -36768,6 +37843,9 @@ class AccountSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -36950,6 +38028,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -36960,25 +38043,48 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - "" // Account SAS requires an additional newline character - ].join("\n"); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); } /** @@ -37025,12 +38131,17 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); @@ -37069,13 +38180,13 @@ class BlobServiceClient extends StorageClient { const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, - containerCreateResponse + containerCreateResponse, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ 
-37099,7 +38210,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37129,7 +38240,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37159,7 +38270,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37183,7 +38294,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37208,7 +38319,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37233,7 +38344,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37259,7 +38370,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37289,7 +38400,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37332,7 +38443,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37498,7 +38609,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -37636,6 +38747,9 @@ class BlobServiceClient extends StorageClient { if (options.includeMetadata) { include.push("metadata"); } + if (options.includeSystem) { + include.push("system"); + } // AsyncIterableIterator to iterate over containers const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include } : {})); const iter = this.listItems(listSegmentOptions); @@ -37657,7 +38771,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -37676,7 +38790,7 @@ class BlobServiceClient extends StorageClient { try { const response = await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) + expiresOn: truncatedISO8061Date(expiresOn, false), }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); const userDelegationKey = { signedObjectId: response.signedObjectId, @@ -37685,7 +38799,7 @@ class BlobServiceClient extends StorageClient { signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, - value: response.value + value: response.value, }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; @@ -37693,7 +38807,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37742,39 +38856,27 @@ class BlobServiceClient extends StorageClient { Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, - get: function () { - return coreHttp.BaseRequestPolicy; - } + get: function () { return coreHttp.BaseRequestPolicy; } }); Object.defineProperty(exports, 'HttpHeaders', { enumerable: true, - get: function () { - return coreHttp.HttpHeaders; - } + get: function () { return coreHttp.HttpHeaders; } }); Object.defineProperty(exports, 'RequestPolicyOptions', { enumerable: true, - get: function () { - return coreHttp.RequestPolicyOptions; - } + get: function () { return coreHttp.RequestPolicyOptions; } }); Object.defineProperty(exports, 'RestError', { enumerable: true, - get: function () { - return coreHttp.RestError; - } + get: function () { return coreHttp.RestError; } }); Object.defineProperty(exports, 'WebResource', { enumerable: true, - get: function () { - return coreHttp.WebResource; - } + get: function () { return coreHttp.WebResource; } }); Object.defineProperty(exports, 'deserializationPolicy', { enumerable: true, - get: function () { - return coreHttp.deserializationPolicy; - } + get: function () { return coreHttp.deserializationPolicy; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -38440,14 +39542,15 @@ var DiagAPI = /** @class */ (function () { function DiagAPI() { function _logProxy(funcName) { return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } var logger = global_utils_1.getGlobal('diag'); // shortcut if logger not set if (!logger) return; - return logger[funcName].apply(logger, - // work around Function.prototype.apply types - // eslint-disable-next-line @typescript-eslint/no-explicit-any - arguments); + return logger[funcName].apply(logger, args); }; } // Using self local variable for minification purposes as 'this' cannot be minified @@ -42558,7 +43661,7 @@ function authenticationBeforeRequest(state, options) { 
XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(231); + XMLStringWriter = __webpack_require__(750); WriterState = __webpack_require__(541); @@ -43737,7 +44840,7 @@ exports.default = _default; * POSSIBILITY OF SUCH DAMAGE. */ -const psl = __webpack_require__(750); +const psl = __webpack_require__(632); function getPublicSuffix(domain) { return psl.get(domain); @@ -45567,7 +46670,7 @@ function hasPreviousPage (link) { XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(231); + XMLStringWriter = __webpack_require__(750); module.exports = XMLDocument = (function(superClass) { extend(XMLDocument, superClass); @@ -47473,7 +48576,282 @@ exports.wrapSpanContext = wrapSpanContext; module.exports = require("net"); /***/ }), -/* 632 */, +/* 632 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ + + + +var Punycode = __webpack_require__(815); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = __webpack_require__(50).map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. 
+ var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. 
+// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + + +/***/ }), /* 633 */, /* 634 */, /* 635 */, @@ -47611,18 +48989,12 @@ exports.restoreCache = restoreCache; * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); + let cacheId = null; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); @@ -47637,9 +49009,24 @@ function saveCache(paths, key, options) { const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } core.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, options); } @@ -53020,7 +54407,7 @@ module.exports = {"activity":{"checkStarringRepo":{"method":"GET","params":{"own Object.defineProperty(exports, '__esModule', { value: true }); -__webpack_require__(338); +__webpack_require__(97); var tslib = __webpack_require__(671); // Copyright (c) Microsoft Corporation. @@ -53043,7 +54430,7 @@ function getPagedAsyncIterator(pagedResult) { }, byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? 
_a : ((settings) => { return getPageAsyncIterator(pagedResult, settings === null || settings === void 0 ? void 0 : settings.maxPageSize); - }) + }), }; } function getItemAsyncIterator(pagedResult, maxPageSize) { @@ -53820,278 +55207,43 @@ function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { /***/ }), /* 750 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; -/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + XMLWriterBase = __webpack_require__(423); + module.exports = XMLStringWriter = (function(superClass) { + extend(XMLStringWriter, superClass); -var Punycode = __webpack_require__(815); - - -var internals = {}; - - -// -// Read rules from file. -// -internals.rules = __webpack_require__(50).map(function (rule) { - - return { - rule: rule, - suffix: rule.replace(/^(\*\.|\!)/, ''), - punySuffix: -1, - wildcard: rule.charAt(0) === '*', - exception: rule.charAt(0) === '!' - }; -}); - - -// -// Check is given string ends with `suffix`. -// -internals.endsWith = function (str, suffix) { - - return str.indexOf(suffix, str.length - suffix.length) !== -1; -}; - - -// -// Find rule for a given domain. -// -internals.findRule = function (domain) { - - var punyDomain = Punycode.toASCII(domain); - return internals.rules.reduce(function (memo, rule) { - - if (rule.punySuffix === -1){ - rule.punySuffix = Punycode.toASCII(rule.suffix); + function XMLStringWriter(options) { + XMLStringWriter.__super__.constructor.call(this, options); } - if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { - return memo; - } - // This has been commented out as it never seems to run. This is because - // sub tlds always appear after their parents and we never find a shorter - // match. - //if (memo) { - // var memoSuffix = Punycode.toASCII(memo.suffix); - // if (memoSuffix.length >= punySuffix.length) { - // return memo; - // } - //} - return rule; - }, null); -}; - -// -// Error codes and messages. -// -exports.errorCodes = { - DOMAIN_TOO_SHORT: 'Domain name too short.', - DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', - LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', - LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', - LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', - LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', - LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' -}; - - -// -// Validate domain name and throw if not valid. -// -// From wikipedia: -// -// Hostnames are composed of series of labels concatenated with dots, as are all -// domain names. Each label must be between 1 and 63 characters long, and the -// entire hostname (including the delimiting dots) has a maximum of 255 chars. 
-// -// Allowed chars: -// -// * `a-z` -// * `0-9` -// * `-` but not as a starting or ending character -// * `.` as a separator for the textual portions of a domain name -// -// * http://en.wikipedia.org/wiki/Domain_name -// * http://en.wikipedia.org/wiki/Hostname -// -internals.validate = function (input) { - - // Before we can validate we need to take care of IDNs with unicode chars. - var ascii = Punycode.toASCII(input); - - if (ascii.length < 1) { - return 'DOMAIN_TOO_SHORT'; - } - if (ascii.length > 255) { - return 'DOMAIN_TOO_LONG'; - } - - // Check each part's length and allowed chars. - var labels = ascii.split('.'); - var label; - - for (var i = 0; i < labels.length; ++i) { - label = labels[i]; - if (!label.length) { - return 'LABEL_TOO_SHORT'; - } - if (label.length > 63) { - return 'LABEL_TOO_LONG'; - } - if (label.charAt(0) === '-') { - return 'LABEL_STARTS_WITH_DASH'; - } - if (label.charAt(label.length - 1) === '-') { - return 'LABEL_ENDS_WITH_DASH'; - } - if (!/^[a-z0-9\-]+$/.test(label)) { - return 'LABEL_INVALID_CHARS'; - } - } -}; - - -// -// Public API -// - - -// -// Parse domain. -// -exports.parse = function (input) { - - if (typeof input !== 'string') { - throw new TypeError('Domain name must be a string.'); - } - - // Force domain to lowercase. - var domain = input.slice(0).toLowerCase(); - - // Handle FQDN. - // TODO: Simply remove trailing dot? - if (domain.charAt(domain.length - 1) === '.') { - domain = domain.slice(0, domain.length - 1); - } - - // Validate and sanitise input. - var error = internals.validate(domain); - if (error) { - return { - input: input, - error: { - message: exports.errorCodes[error], - code: error + XMLStringWriter.prototype.document = function(doc, options) { + var child, i, len, r, ref; + options = this.filterOptions(options); + r = ''; + ref = doc.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, 0); } + if (options.pretty && r.slice(-options.newline.length) === options.newline) { + r = r.slice(0, -options.newline.length); + } + return r; }; - } - var parsed = { - input: input, - tld: null, - sld: null, - domain: null, - subdomain: null, - listed: false - }; + return XMLStringWriter; - var domainParts = domain.split('.'); + })(XMLWriterBase); - // Non-Internet TLD - if (domainParts[domainParts.length - 1] === 'local') { - return parsed; - } - - var handlePunycode = function () { - - if (!/xn--/.test(domain)) { - return parsed; - } - if (parsed.domain) { - parsed.domain = Punycode.toASCII(parsed.domain); - } - if (parsed.subdomain) { - parsed.subdomain = Punycode.toASCII(parsed.subdomain); - } - return parsed; - }; - - var rule = internals.findRule(domain); - - // Unlisted tld. - if (!rule) { - if (domainParts.length < 2) { - return parsed; - } - parsed.tld = domainParts.pop(); - parsed.sld = domainParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - if (domainParts.length) { - parsed.subdomain = domainParts.pop(); - } - return handlePunycode(); - } - - // At this point we know the public suffix is listed. 
- parsed.listed = true; - - var tldParts = rule.suffix.split('.'); - var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); - - if (rule.exception) { - privateParts.push(tldParts.shift()); - } - - parsed.tld = tldParts.join('.'); - - if (!privateParts.length) { - return handlePunycode(); - } - - if (rule.wildcard) { - tldParts.unshift(privateParts.pop()); - parsed.tld = tldParts.join('.'); - } - - if (!privateParts.length) { - return handlePunycode(); - } - - parsed.sld = privateParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - - if (privateParts.length) { - parsed.subdomain = privateParts.join('.'); - } - - return handlePunycode(); -}; - - -// -// Get domain. -// -exports.get = function (domain) { - - if (!domain) { - return null; - } - return exports.parse(domain).domain || null; -}; - - -// -// Check whether domain belongs to a known public suffix. -// -exports.isValid = function (domain) { - - var parsed = exports.parse(domain); - return Boolean(parsed.domain && parsed.listed); -}; +}).call(this); /***/ }), @@ -56450,7 +57602,7 @@ module.exports = v4; Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.3'; +exports.VERSION = '1.0.4'; //# sourceMappingURL=version.js.map /***/ }), @@ -59984,7 +61136,6 @@ class Poller { }); } /** - * @internal * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ @@ -59998,7 +61149,6 @@ class Poller { } } /** - * @internal * pollOnce does one polling, by calling to the update method of the underlying * poll operation to make any relevant change effective. * @@ -60011,7 +61161,7 @@ class Poller { if (!this.isDone()) { this.operation = await this.operation.update({ abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) + fireProgress: this.fireProgress.bind(this), }); if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. @@ -60032,7 +61182,6 @@ class Poller { } } /** - * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. * * It loops over all of the callbacks received from onProgress, and executes them, sending them @@ -60046,7 +61195,6 @@ class Poller { } } /** - * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ @@ -60210,13 +61358,6 @@ class Poller { } } -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -60227,7 +61368,7 @@ const logger = logger$1.createClientLogger("core-lro"); */ function getPollingUrl(rawResponse, defaultPath) { var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? 
_c : defaultPath); } function getLocation(rawResponse) { return rawResponse.headers["location"]; @@ -60238,26 +61379,36 @@ function getOperationLocation(rawResponse) { function getAzureAsyncOperation(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? getLocation(rawResponse) - : undefined - }; +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } } - else if (getLocation(rawResponse) !== undefined || +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || getOperationLocation(rawResponse) !== undefined) { return { - mode: "Location" + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", }; } else if (["PUT", "PATCH"].includes(requestMethod)) { return { - mode: "Body" + mode: "Body", }; } return {}; @@ -60290,50 +61441,6 @@ function isUnexpectedPollingResponse(rawResponse) { const successStates = ["succeeded"]; const failureStates = ["failed", "canceled", "cancelled"]; -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return typeof status === "string" ? status.toLowerCase() : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); - }; -} - // Copyright (c) Microsoft Corporation. 
function getProvisioningState(rawResponse) { var _a, _b; @@ -60357,11 +61464,54 @@ function processBodyPollingOperationResult(response) { } // Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); } -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; } // Copyright (c) Microsoft Corporation. @@ -60376,11 +61526,8 @@ function processPassthroughOperationResult(response) { */ function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } case "Location": { - return processLocationPollingOperationResult; + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); } case "Body": { return processBodyPollingOperationResult; @@ -60398,10 +61545,11 @@ function createPoll(lroPrimitives) { const response = await lroPrimitives.sendPollRequest(path); const retryAfter = response.rawResponse.headers["retry-after"]; if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; + : retryAfterInSeconds * 1000; } return getLroStatusFromResponse(response); }; @@ -60524,7 +61672,7 @@ class GenericPollOperation { */ toString() { return JSON.stringify({ - state: this.state + state: this.state, }); } } @@ -64940,26 +66088,53 @@ exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); Object.defineProperty(exports, '__esModule', { value: true }); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - var uuid = __webpack_require__(585); -var tough = __webpack_require__(701); +var util = __webpack_require__(669); +var tslib = __webpack_require__(865); +var xml2js = __webpack_require__(992); +var abortController = __webpack_require__(106); +var logger$1 = __webpack_require__(928); +var coreAuth = __webpack_require__(229); +var os = __webpack_require__(87); var http = __webpack_require__(605); var https = __webpack_require__(211); -var node_fetch = _interopDefault(__webpack_require__(454)); -var abortController = __webpack_require__(106); -var FormData = _interopDefault(__webpack_require__(790)); -var util = __webpack_require__(669); -var url = __webpack_require__(835); -var stream = __webpack_require__(794); -var logger$1 = __webpack_require__(928); +var tough = __webpack_require__(701); var tunnel = __webpack_require__(413); -var tslib = __webpack_require__(865); -var coreAuth = __webpack_require__(229); -var xml2js = __webpack_require__(992); -var os = __webpack_require__(87); +var stream = __webpack_require__(794); +var FormData = __webpack_require__(790); +var node_fetch = __webpack_require__(454); var coreTracing = __webpack_require__(263); -__webpack_require__(338); +var url = __webpack_require__(835); +__webpack_require__(97); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -65008,7 +66183,7 @@ class HttpHeaders { set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, - value: headerValue.toString() + value: headerValue.toString(), }; } /** @@ -65040,12 +66215,7 @@ class HttpHeaders { * Get the headers that are contained this collection as an object. 
*/ rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name.toLowerCase()] = header.value; - } - return result; + return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. @@ -65082,14 +66252,27 @@ class HttpHeaders { /** * Get the JSON object representation of this HTTP header collection. */ - toJson() { - return this.rawHeaders(); + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; } /** * Get the string representation of this HTTP header collection. */ toString() { - return JSON.stringify(this.toJson()); + return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. @@ -65133,11 +66316,14 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ const Constants = { /** * The core-http version */ - coreHttpVersion: "2.2.2", + coreHttpVersion: "2.2.4", /** * Specifies HTTP. */ @@ -65173,12 +66359,12 @@ const Constants = { POST: "POST", MERGE: "MERGE", HEAD: "HEAD", - PATCH: "PATCH" + PATCH: "PATCH", }, StatusCodes: { TooManyRequests: 429, - ServiceUnavailable: 503 - } + ServiceUnavailable: 503, + }, }, /** * Defines constants for use with HTTP headers. @@ -65198,8 +66384,8 @@ const Constants = { /** * The UserAgent header. */ - USER_AGENT: "User-Agent" - } + USER_AGENT: "User-Agent", + }, }; // Copyright (c) Microsoft Corporation. @@ -65414,18 +66600,38 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ class Serializer { - constructor(modelMappers = {}, isXML) { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { this.modelMappers = modelMappers; this.isXML = isXML; } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. 
+ */ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value != undefined) { const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -65467,20 +66673,20 @@ class Serializer { } } /** - * Serialize the given object based on its metadata defined in the mapper + * Serialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param object - A valid Javascript object to be serialized - * @param objectName - Name of the serialized object - * @param options - additional options to deserialization - * @returns A valid serialized Javascript object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. */ serialize(mapper, object, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; let payload = {}; const mapperType = mapper.type.name; @@ -65550,20 +66756,20 @@ class Serializer { return payload; } /** - * Deserialize the given object based on its metadata defined in the mapper + * Deserialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param responseBody - A valid Javascript entity to be deserialized - * @param objectName - Name of the deserialized object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object + * @returns A valid deserialized Javascript object. */ deserialize(mapper, responseBody, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, }; if (responseBody == undefined) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { @@ -65662,9 +66868,7 @@ function bufferToBase64Url(buffer) { // Uint8Array to Base64. const str = encodeByteArray(buffer); // Base64 to Base64Url. - return trimEnd(str, "=") - .replace(/\+/g, "-") - .replace(/\//g, "_"); + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) { @@ -65880,10 +67084,10 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, return tempDictionary; } /** - * Resolves the additionalProperties property from a referenced mapper - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; @@ -65894,7 +67098,7 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { return additionalProperties; } /** - * Finds the mapper referenced by className + * Finds the mapper referenced by `className`. * @param serializer - The serializer containing the entire set of mappers * @param mapper - The composite mapper to resolve * @param objectName - Name of the object being serialized @@ -66233,7 +67437,9 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator); } -// TODO: why is this here? +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ function serializeObject(toSerialize) { const castToSerialize = toSerialize; if (toSerialize == undefined) @@ -66271,6 +67477,9 @@ function strEnum(o) { } return result; } +/** + * String enum containing the string types of property mappers. + */ // eslint-disable-next-line @typescript-eslint/no-redeclare const MapperType = strEnum([ "Base64Url", @@ -66288,7 +67497,7 @@ const MapperType = strEnum([ "String", "Stream", "TimeSpan", - "UnixTime" + "UnixTime", ]); // Copyright (c) Microsoft Corporation. @@ -66551,9 +67760,6 @@ class WebResource { } } -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. @@ -66851,6 +68057,10 @@ class URLBuilder { } } } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ toString() { let result = ""; if (this._scheme) { @@ -66886,6 +68096,9 @@ class URLBuilder { this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } } + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ static parse(text) { const result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); @@ -67142,6 +68355,60 @@ function nextQuery(tokenizer) { tokenizer._currentState = "DONE"; } +// Copyright (c) Microsoft Corporation. 
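// A minimal standalone sketch (not part of the bundle above) of the base64url
// conversion that the refactored bufferToBase64Url performs: strip '=' padding,
// then map '+' -> '-' and '/' -> '_'. The helper name toBase64Url is illustrative.
function toBase64Url(buffer) {
  return Buffer.from(buffer)
    .toString("base64")    // standard Base64
    .replace(/=+$/, "")    // drop trailing padding
    .replace(/\+/g, "-")   // '+' -> '-'
    .replace(/\//g, "_");  // '/' -> '_'
}

console.log(toBase64Url(Buffer.from([0xfb, 0xff]))); // "-_8"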
+function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + // Copyright (c) Microsoft Corporation. const RedactedString = "REDACTED"; const defaultAllowedHeaderNames = [ @@ -67182,7 +68449,7 @@ const defaultAllowedHeaderNames = [ "Retry-After", "Server", "Transfer-Encoding", - "User-Agent" + "User-Agent", ]; const defaultAllowedQueryParameters = ["api-version"]; class Sanitizer { @@ -67275,8 +68542,14 @@ class Sanitizer { } } +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + // Copyright (c) Microsoft Corporation. const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ class RestError extends Error { constructor(message, code, statusCode, request, response) { super(message); @@ -67294,13 +68567,22 @@ class RestError extends Error { return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; } } +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ RestError.PARSE_ERROR = "PARSE_ERROR"; // Copyright (c) Microsoft Corporation. const logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent; +} class ReportTransform extends stream.Transform { constructor(progressCallback) { super(); @@ -67314,7 +68596,44 @@ class ReportTransform extends stream.Transform { callback(undefined); } } -class FetchHttpClient { +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ async sendRequest(httpRequest) { var _a; if (!httpRequest && typeof httpRequest !== "object") { @@ -67340,7 +68659,7 @@ class FetchHttpClient { } if (httpRequest.formData) { const formData = httpRequest.formData; - const requestForm = new FormData(); + const requestForm = new FormData__default["default"](); const appendFormValue = (key, value) => { // value function probably returns a stream so we can provide a fresh stream on each retry if (typeof value === "function") { @@ -67410,7 +68729,7 @@ class FetchHttpClient { readableStreamBody: streaming ? response.body : undefined, - bodyAsText: !streaming ? await response.text() : undefined + bodyAsText: !streaming ? await response.text() : undefined, }; const onDownloadProgress = httpRequest.onDownloadProgress; if (onDownloadProgress) { @@ -67464,94 +68783,6 @@ class FetchHttpClient { } } } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} - -// Copyright (c) Microsoft Corporation. 
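// A minimal standalone sketch of the stream handling the fetch-based client above
// relies on: request/response bodies are detected as streams by duck-typing on
// `pipe`, and socket reuse waits for "close", "end", or "error". Only Node's
// built-in stream module is assumed; the simplified isStreamComplete drops the
// aborter parameter used in the bundle.
const { Readable } = require("stream");

function isReadableStream(body) {
  return body && typeof body.pipe === "function";
}

function isStreamComplete(stream) {
  return new Promise((resolve) => {
    stream.once("close", resolve);
    stream.once("end", resolve);
    stream.once("error", resolve);
  });
}

const body = Readable.from(["hello"]);
console.log(isReadableStream(body)); // true
body.resume();                       // drain the stream so "end" fires
isStreamComplete(body).then(() => console.log("stream finished"));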
-function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {} - } - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel.httpOverHttps(tunnelOptions); - } - else { - return tunnel.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); - } getOrCreateAgent(httpRequest) { var _a; const isHttps = isUrlHttps(httpRequest.url); @@ -67583,24 +68814,30 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } const agentOptions = { - keepAlive: httpRequest.keepAlive + keepAlive: httpRequest.keepAlive, }; if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); } else { - agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); } return agent; } else { - return isHttps ? https.globalAgent : http.globalAgent; + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; } } + /** + * Uses `node-fetch` to perform the request. + */ // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs async fetch(input, init) { - return node_fetch(input, init); + return node_fetch__default["default"](input, init); } + /** + * Prepares a request based on the provided web resource. 
+ */ async prepareRequest(httpRequest) { const requestInit = {}; if (this.cookieJar && !httpRequest.headers.get("Cookie")) { @@ -67621,6 +68858,9 @@ class NodeFetchHttpClient extends FetchHttpClient { requestInit.compress = httpRequest.decompressResponse; return requestInit; } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ async processRequest(operationResponse) { if (this.cookieJar) { const setCookieHeader = operationResponse.headers.get("Set-Cookie"); @@ -67641,6 +68881,11 @@ class NodeFetchHttpClient extends FetchHttpClient { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; (function (HttpPipelineLogLevel) { /** * A log level that indicates that no logs will be logged. @@ -67660,6 +68905,7 @@ class NodeFetchHttpClient extends FetchHttpClient { HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); +// Copyright (c) Microsoft Corporation. /** * Converts an OperationOptions to a RequestOptionsBase * @@ -67681,8 +68927,22 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ class BaseRequestPolicy { - constructor(_nextPolicy, _options) { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -67734,113 +68994,6 @@ class RequestPolicyOptions { } } -// Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - } - }; -} -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. 
- * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - // Copyright (c) Microsoft Corporation. // Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed // by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 @@ -67873,18 +69026,18 @@ const xml2jsDefaultOptionsV2 = { xmldec: { version: "1.0", encoding: "UTF-8", - standalone: true + standalone: true, }, doctype: undefined, renderOpts: { pretty: true, indent: " ", - newline: "\n" + newline: "\n", }, headless: false, chunkSize: 10000, emptyTag: "", - cdata: false + cdata: false, }; // The xml2js settings for general XML parsing operations. const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); @@ -67893,7 +69046,7 @@ xml2jsParserSettings.explicitArray = false; const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { - pretty: false + pretty: false, }; /** * Converts given JSON object to XML string @@ -67904,7 +69057,7 @@ function stringifyXML(obj, opts = {}) { var _a; xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -67916,7 +69069,7 @@ function parseXML(str, opts = {}) { var _a; xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); return new Promise((resolve, reject) => { if (!str) { reject(new Error("Document is empty")); @@ -67943,7 +69096,7 @@ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { create: (nextPolicy, options) => { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - } + }, }; } const defaultJsonContentTypes = ["application/json", "text/json"]; @@ -67951,8 +69104,8 @@ const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; const DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, - xml: defaultXmlContentTypes - } + xml: defaultXmlContentTypes, + }, }; /** * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the @@ -67970,7 +69123,7 @@ class DeserializationPolicy extends BaseRequestPolicy { } async sendRequest(request) { return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey + xmlCharKey: this.xmlCharKey, })); } } @@ -68003,12 +69156,20 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { if (!shouldDeserializeResponse(parsedResponse)) { @@ -68159,6 +69320,113 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { return Promise.resolve(operationResponse); } +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
+ * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. const DEFAULT_CLIENT_RETRY_COUNT = 3; @@ -68222,7 +69490,7 @@ function isDefined(thing) { } // Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; +const StandardAbortMessage$1 = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. 
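// A minimal standalone sketch of the redirect decision used by handleRedirect above:
// a "location" header is followed only for these status/method combinations, with a
// retry cap, and a 303 POST is later replayed as a GET. The helper name
// shouldFollowRedirect is illustrative, not from the bundle.
const allowedRedirect = ["GET", "HEAD"];

function shouldFollowRedirect(status, method, currentRetries, maxRetries = 20) {
  const statusAllowed =
    status === 300 ||
    (status === 301 && allowedRedirect.includes(method)) ||
    (status === 302 && allowedRedirect.includes(method)) ||
    (status === 303 && method === "POST") ||
    status === 307;
  return statusAllowed && currentRetries < maxRetries;
}

console.log(shouldFollowRedirect(301, "GET", 0));  // true
console.log(shouldFollowRedirect(301, "POST", 0)); // false
console.log(shouldFollowRedirect(303, "POST", 0)); // true (the retried request becomes a GET)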
@@ -68237,7 +69505,7 @@ function delay(delayInMs, value, options) { let timer = undefined; let onAborted = undefined; const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { @@ -68265,20 +69533,34 @@ function delay(delayInMs, value, options) { } // Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - } + }, }; } +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +exports.RetryMode = void 0; (function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); const DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, }; /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. @@ -68303,11 +69585,11 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); } } -async function retry(policy, request, response, retryData, requestError) { +async function retry$1(policy, request, response, retryData, requestError) { function shouldPolicyRetry(responseParam) { const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { @@ -68324,7 +69606,7 @@ async function retry(policy, request, response, retryData, requestError) { retryData = updateRetryData({ retryInterval: policy.retryInterval, minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval + maxRetryInterval: policy.maxRetryInterval, }, retryData, requestError); const isAborted = request.abortSignal && request.abortSignal.aborted; if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { @@ -68332,10 +69614,10 @@ async function retry(policy, request, response, retryData, requestError) { try { await delay(retryData.retryInterval); const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); + return retry$1(policy, request, res, retryData); } catch (err) { - return retry(policy, request, response, retryData, err); + return retry$1(policy, request, response, retryData, err); } } else if (isAborted || requestError || !response) { @@ -68350,11 +69632,467 @@ async function retry(policy, request, response, retryData, requestError) { } // Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. 
+ */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? 
getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { create: (nextPolicy, options) => { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - } + }, }; } class GenerateClientRequestIdPolicy extends BaseRequestPolicy { @@ -68371,130 +70109,190 @@ class GenerateClientRequestIdPolicy extends BaseRequestPolicy { } // Copyright (c) Microsoft Corporation. 
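// A minimal standalone sketch of the token cycler's two refresh predicates shown
// above: a background refresh starts inside the refresh window, while the pipeline
// only blocks inside the smaller forced-refresh window. Default window values are
// taken from DEFAULT_CYCLER_OPTIONS in the bundle; the function names are illustrative.
const refreshWindowInMs = 2 * 60 * 1000;  // start refreshing 2 minutes before expiry
const forcedRefreshWindowInMs = 1000;     // block the pipeline 1 second before expiry

function shouldRefresh(token, isRefreshing, now = Date.now()) {
  const expiresOn = token ? token.expiresOnTimestamp : 0;
  return !isRefreshing && expiresOn - refreshWindowInMs < now;
}

function mustRefresh(token, now = Date.now()) {
  return token === null || token.expiresOnTimestamp - forcedRefreshWindowInMs < now;
}

// A token expiring in 90 seconds triggers a background refresh but does not yet
// force callers to wait for the new token.
const token = { expiresOnTimestamp: Date.now() + 90 * 1000 };
console.log(shouldRefresh(token, false)); // true
console.log(mustRefresh(token));          // false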
-function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version - }; - const osInfo = { - key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` - }; - return [runtimeInfo, osInfo]; +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; +function ndJsonPolicy() { return { create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - } + return new NdJsonPolicy(nextPolicy, options); + }, }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - sendRequest(request) { - this.addUserAgentHeader(request); + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } return this._nextPolicy.sendRequest(request); } - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } } // Copyright (c) Microsoft Corporation. /** - * Methods that are allowed to follow redirects 301 and 302 + * Stores the patterns specified in NO_PROXY environment variable. 
+ * @internal */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20 -}; -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { super(nextPolicy, options); - this.maxRetries = maxRetries; + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; } sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + return this._nextPolicy.sendRequest(request); } - return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. @@ -68502,7 +70300,7 @@ function rpRegistrationPolicy(retryTimeout = 30) { return { create: (nextPolicy, options) => { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - } + }, }; } class RPRegistrationPolicy extends BaseRequestPolicy { @@ -68647,193 +70445,52 @@ async function getRegistrationStatus(policy, url, originalRequest) { } // Copyright (c) Microsoft Corporation. 
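// A minimal standalone sketch of the NO_PROXY matching used by isBypassed above:
// an entry starting with "." matches the bare domain and every subdomain, while any
// other entry must match the host exactly. The helper name hostBypassesProxy is
// illustrative; the bundle additionally caches results per host.
function hostBypassesProxy(host, noProxyList) {
  return noProxyList.some((pattern) => {
    if (pattern.startsWith(".")) {
      return (
        host.endsWith(pattern) ||                                        // sub.foo.com matches .foo.com
        (host.length === pattern.length - 1 && host === pattern.slice(1)) // foo.com matches .foo.com
      );
    }
    return host === pattern;                                             // exact match only
  });
}

console.log(hostBypassesProxy("api.foo.com", [".foo.com"]));    // true
console.log(hostBypassesProxy("foo.com", [".foo.com"]));        // true
console.log(hostBypassesProxy("foo.com.evil.io", ["foo.com"])); // false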
-// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry -}; /** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). 
- */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - } - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - } - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } +function signingPolicy(authenticationProvider) { return { create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - } + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, }; } +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} // Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - } + }, }; } /** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". * @param retryCount - The client retry count. * @param retryInterval - The client retry interval, in milliseconds. * @param minRetryInterval - The minimum retry interval, in milliseconds. @@ -68854,10 +70511,10 @@ class SystemErrorRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); + .catch((error) => retry(this, request, error.response, error)); } } -async function retry$1(policy, request, operationResponse, err, retryData) { +async function retry(policy, request, operationResponse, err, retryData) { retryData = updateRetryData(policy, retryData, err); function shouldPolicyRetry(_response, error) { if (error && @@ -68878,7 +70535,7 @@ async function retry$1(policy, request, operationResponse, err, retryData) { return policy._nextPolicy.sendRequest(request.clone()); } catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); + return retry(policy, request, operationResponse, nestedErr, retryData); } } else { @@ -68890,155 +70547,6 @@ async function retry$1(policy, request, operationResponse, err, retryData) { } } -// Copyright (c) Microsoft Corporation. -(function (QueryCollectionFormat) { - QueryCollectionFormat["Csv"] = ","; - QueryCollectionFormat["Ssv"] = " "; - QueryCollectionFormat["Tsv"] = "\t"; - QueryCollectionFormat["Pipes"] = "|"; - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - } - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? 
schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -69048,15 +70556,28 @@ const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; // Copyright (c) Microsoft Corporation. const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ function throttlingRetryPolicy() { return { create: (nextPolicy, options) => { return new ThrottlingRetryPolicy(nextPolicy, options); - } + }, }; } -const StandardAbortMessage$1 = "The operation was aborted."; +const StandardAbortMessage = "The operation was aborted."; /** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and @@ -69087,10 +70608,10 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { this.numberOfRetries += 1; await delay(delayInMs, undefined, { abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage$1 + abortErrorMsg: StandardAbortMessage, }); if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage$1); + throw new abortController.AbortError(StandardAbortMessage); } if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { return this.sendRequest(httpRequest); @@ -69124,77 +70645,26 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. 
-function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - } - }; -} -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -const DefaultKeepAliveOptions = { - enable: true -}; -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - } - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. const createSpan = coreTracing.createSpanFunction({ packagePrefix: "", - namespace: "" + namespace: "", }); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ function tracingPolicy(tracingOptions = {}) { return { create(nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); - } + }, }; } +/** + * A policy that wraps outgoing requests with a tracing span. + */ class TracingPolicy extends BaseRequestPolicy { constructor(nextPolicy, options, tracingOptions) { super(nextPolicy, options); @@ -69221,14 +70691,13 @@ class TracingPolicy extends BaseRequestPolicy { tryCreateSpan(request) { var _a; try { - const path = URLBuilder.parse(request.url).getPath() || "/"; // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(path, { + const { span } = createSpan(`HTTP ${request.method}`, { tracingOptions: { spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } + tracingContext: request.tracingContext, + }, }); // If the span is not recording, don't do any more work. 
if (!span.isRecording()) { @@ -69242,7 +70711,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttributes({ "http.method": request.method, "http.url": request.url, - requestId: request.requestId + requestId: request.requestId, }); if (this.userAgent) { span.setAttribute("http.user_agent", this.userAgent); @@ -69269,7 +70738,7 @@ class TracingPolicy extends BaseRequestPolicy { try { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: err.message + message: err.message, }); if (err.statusCode) { span.setAttribute("http.status_code", err.statusCode); @@ -69288,7 +70757,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttribute("serviceRequestId", serviceRequestId); } span.setStatus({ - code: coreTracing.SpanStatusCode.OK + code: coreTracing.SpanStatusCode.OK, }); span.end(); } @@ -69298,88 +70767,6 @@ class TracingPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - } - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - } - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - // Copyright (c) Microsoft Corporation. /** * ServiceClient sends service requests and receives responses. 
@@ -69429,7 +70816,7 @@ class ServiceClient { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); } return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - } + }, }; }; authPolicyFactory = wrappedPolicyFactory(); @@ -69664,7 +71051,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op const updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, }; const xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { @@ -69683,13 +71070,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } } @@ -69773,6 +71160,12 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(logPolicy({ logger: logger.info })); return factories; } +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { const requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { @@ -69811,7 +71204,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories, }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -69887,12 +71280,18 @@ function getPropertyFromParameterPath(parent, parameterPath) { } return result; } +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ function flattenResponse(_response, responseSpec) { const parsedHeaders = _response.parsedHeaders; const bodyMapper = responseSpec && responseSpec.bodyMapper; const addOperationResponse = (obj) => { return Object.defineProperty(obj, "_response", { - value: _response + value: _response, }); }; if (bodyMapper) { @@ -69978,9 +71377,16 @@ class ExpiringAccessTokenCache { this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ setCachedToken(accessToken) { this.cachedToken = accessToken; } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ getCachedToken() { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { @@ -70039,6 +71445,9 @@ class AccessTokenRefresher { // Copyright (c) Microsoft Corporation. const HeaderConstants = Constants.HeaderConstants; const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ class BasicAuthenticationCredentials { /** * Creates a new BasicAuthenticationCredentials object. @@ -70048,6 +71457,10 @@ class BasicAuthenticationCredentials { * @param authorizationScheme - The authorization scheme. */ constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -70127,6 +71540,9 @@ class ApiKeyCredentials { } // Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ class TopicCredentials extends ApiKeyCredentials { /** * Creates a new EventGrid TopicCredentials object. @@ -70139,8 +71555,8 @@ class TopicCredentials extends ApiKeyCredentials { } const options = { inHeader: { - "aeg-sas-key": topicKey - } + "aeg-sas-key": topicKey, + }, }; super(options); } @@ -70148,9 +71564,7 @@ class TopicCredentials extends ApiKeyCredentials { Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, - get: function () { - return coreAuth.isTokenCredential; - } + get: function () { return coreAuth.isTokenCredential; } }); exports.AccessTokenRefresher = AccessTokenRefresher; exports.ApiKeyCredentials = ApiKeyCredentials; diff --git a/package-lock.json b/package-lock.json index 7610fb69..87480e4a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "setup-node", - "version": "3.1.0", + "version": "3.1.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "setup-node", - "version": "3.1.0", + "version": "3.1.1", "license": "MIT", "dependencies": { - "@actions/cache": "^2.0.0", + "@actions/cache": "^2.0.2", "@actions/core": "^1.6.0", "@actions/exec": "^1.1.0", "@actions/github": "^1.1.0", @@ -32,9 +32,9 @@ } }, "node_modules/@actions/cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.0.tgz", - "integrity": "sha512-d7n8ul6HjWX6oDrNEPoqn8ZvqyyDhp9Uek6WOxALyxGVsXU+8+ND+viD3UfrXVWfs/GQiqI5Eq4cOozZj0yRFQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.2.tgz", + "integrity": "sha512-K1DCaW/OtHj5mV7hI7HEXiceX3rM4Nc0iG2hfYsrkEy6GiOeqlCC/LyICrBZIRDM6+vSrS12tg1ORl4hghomBA==", "dependencies": { "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", @@ -142,9 +142,12 @@ "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/@azure/core-asynciterator-polyfill": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.0.tgz", - "integrity": "sha512-kmv8CGrPfN9SwMwrkiBK9VTQYxdFQEGe0BmQk+M8io56P9KNzpAxcWE/1fxJj7uouwN4kXF0BHW8DNlgx+wtCg==" + "version": "1.0.2", + 
"resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.2.tgz", + "integrity": "sha512-3rkP4LnnlWawl0LZptJOdXNrT/fHp2eQMadoasa6afspXdpGrtPZuAQc2PD0cpgyuoXtUWyC3tv7xfntjGS5Dw==", + "engines": { + "node": ">=12.0.0" + } }, "node_modules/@azure/core-auth": { "version": "1.3.2", @@ -164,9 +167,9 @@ "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/@azure/core-http": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz", - "integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.4.tgz", + "integrity": "sha512-QmmJmexXKtPyc3/rsZR/YTLDvMatzbzAypJmLzvlfxgz/SkgnqV/D4f6F2LsK6tBj1qhyp8BoXiOebiej0zz3A==", "dependencies": { "@azure/abort-controller": "^1.0.0", "@azure/core-asynciterator-polyfill": "^1.0.0", @@ -176,7 +179,7 @@ "@types/node-fetch": "^2.5.0", "@types/tunnel": "^0.0.3", "form-data": "^4.0.0", - "node-fetch": "^2.6.0", + "node-fetch": "^2.6.7", "process": "^0.11.10", "tough-cookie": "^4.0.0", "tslib": "^2.2.0", @@ -215,9 +218,9 @@ } }, "node_modules/@azure/core-lro": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.1.tgz", - "integrity": "sha512-HE6PBl+mlKa0eBsLwusHqAqjLc5n9ByxeDo3Hz4kF3B1hqHvRkBr4oMgoT6tX7Hc3q97KfDctDUon7EhvoeHPA==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", "dependencies": { "@azure/abort-controller": "^1.0.0", "@azure/core-tracing": "1.0.0-preview.13", @@ -234,9 +237,9 @@ "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/@azure/core-paging": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.2.0.tgz", - "integrity": "sha512-ZX1bCjm/MjKPCN6kQD/9GJErYSoKA8YWp6YWoo5EIzcTWlSBLXu3gNaBTUl8usGl+UShiKo7b4Gdy1NSTIlpZg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.2.1.tgz", + "integrity": "sha512-UtH5iMlYsvg+nQYIl4UHlvvSrsBjOlRF4fs0j7mxd3rWdAStrKYrh2durOpHs5C9yZbVhsVDaisoyaf/lL1EVA==", "dependencies": { "@azure/core-asynciterator-polyfill": "^1.0.0", "tslib": "^2.2.0" @@ -284,14 +287,14 @@ "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/@azure/ms-rest-js": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.0.tgz", - "integrity": "sha512-4C5FCtvEzWudblB+h92/TYYPiq7tuElX8icVYToxOdggnYqeec4Se14mjse5miInKtZahiFHdl8lZA/jziEc5g==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.1.tgz", + "integrity": "sha512-LLi4jRe/qy5IM8U2CkoDgSZp2OH+MgDe2wePmhz8uY84Svc53EhHaamVyoU6BjjHBxvCRh1vcD1urJDccrxqIw==", "dependencies": { "@azure/core-auth": "^1.1.4", "abort-controller": "^3.0.0", "form-data": "^2.5.0", - "node-fetch": "^2.6.0", + "node-fetch": "^2.6.7", "tough-cookie": "^3.0.1", "tslib": "^1.10.0", "tunnel": "0.0.6", @@ -334,9 +337,9 @@ } }, "node_modules/@azure/storage-blob": { - "version": "12.8.0", - "resolved": 
"https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.8.0.tgz", - "integrity": "sha512-c8+Wz19xauW0bGkTCoqZH4dYfbtBniPiGiRQOn1ca6G5jsjr4azwaTk9gwjVY8r3vY2Taf95eivLzipfIfiS4A==", + "version": "12.9.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.9.0.tgz", + "integrity": "sha512-ank38FdCLfJ+EoeMzCz3hkYJuZAd63ARvDKkxZYRDb+beBYf+/+gx8jNTqkq/hfyUl4dJQ/a7tECU0Y0F98CHg==", "dependencies": { "@azure/abort-controller": "^1.0.0", "@azure/core-http": "^2.0.0", @@ -1359,9 +1362,9 @@ } }, "node_modules/@opentelemetry/api": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.3.tgz", - "integrity": "sha512-puWxACExDe9nxbBB3lOymQFrLYml2dVOrd7USiVRnSbgXE+KwBu+HxFvxrzfqsiSda9IWsXJG1ef7C1O2/GmKQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.4.tgz", + "integrity": "sha512-BuJuXRSJNQ3QoKA6GWWDyuLpOUck+9hAXNMCnrloc1aWVoy6Xq6t9PUV08aBZ4Lutqq2LEHM486bpZqoViScog==", "engines": { "node": ">=8.0.0" } @@ -1483,9 +1486,9 @@ "integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ==" }, "node_modules/@types/node-fetch": { - "version": "2.5.12", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.12.tgz", - "integrity": "sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==", "dependencies": { "@types/node": "*", "form-data": "^3.0.0" @@ -5077,9 +5080,9 @@ }, "dependencies": { "@actions/cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.0.tgz", - "integrity": "sha512-d7n8ul6HjWX6oDrNEPoqn8ZvqyyDhp9Uek6WOxALyxGVsXU+8+ND+viD3UfrXVWfs/GQiqI5Eq4cOozZj0yRFQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.2.tgz", + "integrity": "sha512-K1DCaW/OtHj5mV7hI7HEXiceX3rM4Nc0iG2hfYsrkEy6GiOeqlCC/LyICrBZIRDM6+vSrS12tg1ORl4hghomBA==", "requires": { "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", @@ -5184,9 +5187,9 @@ } }, "@azure/core-asynciterator-polyfill": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.0.tgz", - "integrity": "sha512-kmv8CGrPfN9SwMwrkiBK9VTQYxdFQEGe0BmQk+M8io56P9KNzpAxcWE/1fxJj7uouwN4kXF0BHW8DNlgx+wtCg==" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.2.tgz", + "integrity": "sha512-3rkP4LnnlWawl0LZptJOdXNrT/fHp2eQMadoasa6afspXdpGrtPZuAQc2PD0cpgyuoXtUWyC3tv7xfntjGS5Dw==" }, "@azure/core-auth": { "version": "1.3.2", @@ -5205,9 +5208,9 @@ } }, "@azure/core-http": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz", - "integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.4.tgz", + "integrity": "sha512-QmmJmexXKtPyc3/rsZR/YTLDvMatzbzAypJmLzvlfxgz/SkgnqV/D4f6F2LsK6tBj1qhyp8BoXiOebiej0zz3A==", "requires": { "@azure/abort-controller": "^1.0.0", "@azure/core-asynciterator-polyfill": "^1.0.0", @@ -5217,7 +5220,7 @@ "@types/node-fetch": "^2.5.0", "@types/tunnel": "^0.0.3", 
"form-data": "^4.0.0", - "node-fetch": "^2.6.0", + "node-fetch": "^2.6.7", "process": "^0.11.10", "tough-cookie": "^4.0.0", "tslib": "^2.2.0", @@ -5249,9 +5252,9 @@ } }, "@azure/core-lro": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.1.tgz", - "integrity": "sha512-HE6PBl+mlKa0eBsLwusHqAqjLc5n9ByxeDo3Hz4kF3B1hqHvRkBr4oMgoT6tX7Hc3q97KfDctDUon7EhvoeHPA==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", "requires": { "@azure/abort-controller": "^1.0.0", "@azure/core-tracing": "1.0.0-preview.13", @@ -5267,9 +5270,9 @@ } }, "@azure/core-paging": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.2.0.tgz", - "integrity": "sha512-ZX1bCjm/MjKPCN6kQD/9GJErYSoKA8YWp6YWoo5EIzcTWlSBLXu3gNaBTUl8usGl+UShiKo7b4Gdy1NSTIlpZg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.2.1.tgz", + "integrity": "sha512-UtH5iMlYsvg+nQYIl4UHlvvSrsBjOlRF4fs0j7mxd3rWdAStrKYrh2durOpHs5C9yZbVhsVDaisoyaf/lL1EVA==", "requires": { "@azure/core-asynciterator-polyfill": "^1.0.0", "tslib": "^2.2.0" @@ -5314,14 +5317,14 @@ } }, "@azure/ms-rest-js": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.0.tgz", - "integrity": "sha512-4C5FCtvEzWudblB+h92/TYYPiq7tuElX8icVYToxOdggnYqeec4Se14mjse5miInKtZahiFHdl8lZA/jziEc5g==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.1.tgz", + "integrity": "sha512-LLi4jRe/qy5IM8U2CkoDgSZp2OH+MgDe2wePmhz8uY84Svc53EhHaamVyoU6BjjHBxvCRh1vcD1urJDccrxqIw==", "requires": { "@azure/core-auth": "^1.1.4", "abort-controller": "^3.0.0", "form-data": "^2.5.0", - "node-fetch": "^2.6.0", + "node-fetch": "^2.6.7", "tough-cookie": "^3.0.1", "tslib": "^1.10.0", "tunnel": "0.0.6", @@ -5357,9 +5360,9 @@ } }, "@azure/storage-blob": { - "version": "12.8.0", - "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.8.0.tgz", - "integrity": "sha512-c8+Wz19xauW0bGkTCoqZH4dYfbtBniPiGiRQOn1ca6G5jsjr4azwaTk9gwjVY8r3vY2Taf95eivLzipfIfiS4A==", + "version": "12.9.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.9.0.tgz", + "integrity": "sha512-ank38FdCLfJ+EoeMzCz3hkYJuZAd63ARvDKkxZYRDb+beBYf+/+gx8jNTqkq/hfyUl4dJQ/a7tECU0Y0F98CHg==", "requires": { "@azure/abort-controller": "^1.0.0", "@azure/core-http": "^2.0.0", @@ -6177,9 +6180,9 @@ } }, "@opentelemetry/api": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.3.tgz", - "integrity": "sha512-puWxACExDe9nxbBB3lOymQFrLYml2dVOrd7USiVRnSbgXE+KwBu+HxFvxrzfqsiSda9IWsXJG1ef7C1O2/GmKQ==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.4.tgz", + "integrity": "sha512-BuJuXRSJNQ3QoKA6GWWDyuLpOUck+9hAXNMCnrloc1aWVoy6Xq6t9PUV08aBZ4Lutqq2LEHM486bpZqoViScog==" }, "@sinonjs/commons": { "version": "1.8.3", @@ -6295,9 +6298,9 @@ "integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ==" }, "@types/node-fetch": { - "version": "2.5.12", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.12.tgz", - "integrity": "sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==", + "version": "2.6.1", + "resolved": 
"https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA==", "requires": { "@types/node": "*", "form-data": "^3.0.0" diff --git a/package.json b/package.json index 03f0241b..63b36edd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "setup-node", - "version": "3.1.0", + "version": "3.1.1", "private": true, "description": "setup node action", "main": "lib/setup-node.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^2.0.0", + "@actions/cache": "^2.0.2", "@actions/core": "^1.6.0", "@actions/exec": "^1.1.0", "@actions/github": "^1.1.0",